VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 100839

Last change on this file since 100839 was 100839, checked in by vboxsync, 20 months ago

VMM/IEM: More conversion from IEM_MC_MEM_MAP to IEM_MC_MEM_MAP_XXX. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 485.0 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 100839 2023-08-09 17:30:14Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Note! This macro deliberately ends in the middle of the LOCK-prefixed
 *       memory 'else' branch (the trailing (void)0 swallows the caller's
 *       semicolon).  It must be followed directly by
 *       IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED,
 *       which close the open scopes.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands.
 *
 * Same shape as IEMOP_BODY_BINARY_rm_r8_RW but the memory destination is
 * mapped read-only (TEST/CMP only read it).  Deliberately ends inside the
 * LOCK-prefixed 'else' branch; must be followed by
 * IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,          0); \
            IEM_MC_ARG(uint8_t,         u8Src,           1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
178
/**
 * Closes the scopes left open by IEMOP_BODY_BINARY_rm_r8_RW/RO for
 * instructions where the LOCK prefix is invalid: raises the
 * invalid-lock-prefix exception when LOCK was present on a memory operand.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Closes the scopes left open by IEMOP_BODY_BINARY_rm_r8_RW with the
 * LOCK-prefixed memory variant, calling a_fnLockedU8 (the atomic worker) on
 * the mapped byte destination.
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * The destination is always a register, so LOCK is never valid and the
 * memory source is only fetched (no mapping needed); this macro is therefore
 * self-contained, unlike the _rm_r8_ variants.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Note! Like IEMOP_BODY_BINARY_rm_r8_RW, this deliberately ends inside the
 *       LOCK-prefixed memory 'else' branch and must be followed directly by
 *       IEMOP_BODY_BINARY_rm_rv_LOCKED, which closes the open scopes.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* Writing a 32-bit GPR zero-extends into the full 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Closes the scopes left open by IEMOP_BODY_BINARY_rm_rv_RW with the
 * LOCK-prefixed memory variants, calling the atomic a_fnLockedU16/U32/U64
 * worker for the effective operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * Unlike the _RW variant this macro is self-contained: since these
 * instructions never write the destination, a LOCK prefix is always invalid
 * and is rejected inline rather than via a companion _LOCKED macro.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,        0); \
                    IEM_MC_ARG(uint16_t,        u16Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,        0); \
                    IEM_MC_ARG(uint32_t,        u32Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,        0); \
                    IEM_MC_ARG(uint64_t,        u64Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ that work on AL with
 * a byte immediate.
 *
 * Note! Intentionally ends without a semicolon after IEM_MC_END() so the
 *       caller's trailing ';' completes the statement.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ that work on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param a_fModifiesDstReg  Non-zero when the instruction writes the
 *                           destination register; controls whether the
 *                           32-bit case clears the high half of the 64-bit
 *                           register (skip for compare/test style ops).
 *
 * NOTE(review): the cases have no 'break' after IEM_MC_END(); this relies on
 * IEM_MC_ADVANCE_RIP_AND_FINISH() returning from the function -- confirm
 * against the IEM_MC definitions.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            /* The 64-bit form takes a sign-extended 32-bit immediate. */ \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
/**
 * @opcode      0x00
 * @opmnemonic  add
 * @op1         rm:Eb
 * @op2         reg:Gb
 * @opmaps      one
 * @openc       ModR/M
 * @opflmodify  cf,pf,af,zf,sf,of
 * @ophints     harmless ignores_op_sizes
 * @opstats     add_Eb_Gb
 * @opgroup     og_gen_arith_bin
 * @optest              op1=1   op2=1   -> op1=2   efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf op1=1   op2=2   -> op1=3   efl&|=nc,po,na,nz,pl,nv
 * @optest              op1=254 op2=1   -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest              op1=128 op2=128 -> op1=0   efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The _RW body handles register and unlocked-memory forms; _LOCKED completes it. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
742
743
/**
 * @opcode      0x01
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The _RW body handles register and unlocked-memory forms; _LOCKED completes it. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
759
760
/**
 * @opcode      0x02
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
772
773
/**
 * @opcode      0x03
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
785
786
/**
 * @opcode      0x04
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
798
799
/**
 * @opcode      0x05
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nv,pl,nz,na,pe
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1 /* modifies rAX */);
}
814
815
/**
 * @opcode      0x06
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid encoding in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
826
827
/**
 * @opcode      0x07
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid encoding in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Loading a segment register can change the mode, hence IEM_CIMPL_F_MODE. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
839
840
/**
 * @opcode      0x08
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest                  op1=7    op2=12   -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0    op2=0    -> op1=0    efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest                  op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
859
860
/**
 * @opcode      0x09
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf  op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0  op2=0 -> op1=0  efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a             op2=0xa5a5             -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a         op2=0xa5a5a5a5         -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
881
882
/**
 * @opcode      0x0a
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
897
898
/**
 * @opcode      0x0b
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
913
914
/**
 * @opcode      0x0c
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
929
930
/**
 * @opcode      0x0d
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf  op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0  op2=0 -> op1=0  efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a             op2=0xa5a5     -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a         op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1 /* modifies rAX */);
}
951
952
/**
 * @opcode      0x0e
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid encoding in 64-bit mode (0x0e is the 2-byte escape there too). */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
965/**
966 * @opcode 0x0f
967 * @opmnemonic EscTwo0f
968 * @openc two0f
969 * @opdisenum OP_2B_ESC
970 * @ophints harmless
971 * @opgroup og_escapes
972 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* Four table entries per opcode, selected by the mandatory-prefix
           index (idxPrefix) — see the 0xbc bsf/tzcnt asserts above. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
1009/**
1010 * @opcode 0x10
1011 * @opgroup og_gen_arith_bin
1012 * @opfltest cf
1013 * @opflmodify cf,pf,af,zf,sf,of
1014 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1015 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1016 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1017 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1019 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: memory/register destination — emit both the plain RMW body
       and the LOCK-prefixed variant. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
1028/**
1029 * @opcode 0x11
1030 * @opgroup og_gen_arith_bin
1031 * @opfltest cf
1032 * @opflmodify cf,pf,af,zf,sf,of
1033 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1036 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1037 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1038 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: 16/32/64-bit RMW form plus LOCK-prefixed variant. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
1047/**
1048 * @opcode 0x12
1049 * @opgroup og_gen_arith_bin
1050 * @opfltest cf
1051 * @opflmodify cf,pf,af,zf,sf,of
1052 * @opcopytests iemOp_adc_Eb_Gb
1053 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1059
1060
1061/**
1062 * @opcode 0x13
1063 * @opgroup og_gen_arith_bin
1064 * @opfltest cf
1065 * @opflmodify cf,pf,af,zf,sf,of
1066 * @opcopytests iemOp_adc_Ev_Gv
1067 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: register destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1073
1074
1075/**
1076 * @opcode 0x14
1077 * @opgroup og_gen_arith_bin
1078 * @opfltest cf
1079 * @opflmodify cf,pf,af,zf,sf,of
1080 * @opcopytests iemOp_adc_Eb_Gb
1081 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: fixed byte-sized form. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
1089/**
1090 * @opcode 0x15
1091 * @opgroup og_gen_arith_bin
1092 * @opfltest cf
1093 * @opflmodify cf,pf,af,zf,sf,of
1094 * @opcopytests iemOp_adc_Ev_Gv
1095 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: operand-size sensitive. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
1103/**
1104 * @opcode 0x16
1105 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode; shares the common segment-register
       push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1112
1113
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: defers to the common segment-register pop code.
       DISOPTYPE_INHIBIT_IRQS: POP SS inhibits interrupts for one instruction;
       IEM_CIMPL_F_MODE: loading SS can change the stack/mode context. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1127
1128
1129/**
1130 * @opcode 0x18
1131 * @opgroup og_gen_arith_bin
1132 * @opfltest cf
1133 * @opflmodify cf,pf,af,zf,sf,of
1134 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: memory/register destination; plain RMW body plus LOCK variant. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1141
1142
1143/**
1144 * @opcode 0x19
1145 * @opgroup og_gen_arith_bin
1146 * @opfltest cf
1147 * @opflmodify cf,pf,af,zf,sf,of
1148 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: 16/32/64-bit RMW form plus LOCK-prefixed variant. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1155
1156
1157/**
1158 * @opcode 0x1a
1159 * @opgroup og_gen_arith_bin
1160 * @opfltest cf
1161 * @opflmodify cf,pf,af,zf,sf,of
1162 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1168
1169
1170/**
1171 * @opcode 0x1b
1172 * @opgroup og_gen_arith_bin
1173 * @opfltest cf
1174 * @opflmodify cf,pf,af,zf,sf,of
1175 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: register destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1181
1182
1183/**
1184 * @opcode 0x1c
1185 * @opgroup og_gen_arith_bin
1186 * @opfltest cf
1187 * @opflmodify cf,pf,af,zf,sf,of
1188 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: fixed byte-sized form. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1194
1195
1196/**
1197 * @opcode 0x1d
1198 * @opgroup og_gen_arith_bin
1199 * @opfltest cf
1200 * @opflmodify cf,pf,af,zf,sf,of
1201 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: operand-size sensitive. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1207
1208
1209/**
1210 * @opcode 0x1e
1211 * @opgroup og_stack_sreg
1212 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode; shares the common segment-register
       push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1219
1220
1221/**
1222 * @opcode 0x1f
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: defers to the common segment-register pop code;
       IEM_CIMPL_F_MODE since a segment load may affect execution context. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1232
1233
1234/**
1235 * @opcode 0x20
1236 * @opgroup og_gen_arith_bin
1237 * @opflmodify cf,pf,af,zf,sf,of
1238 * @opflundef af
1239 * @opflclear of,cf
1240 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: RMW byte form plus LOCK variant; AF left undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1248
1249
1250/**
1251 * @opcode 0x21
1252 * @opgroup og_gen_arith_bin
1253 * @opflmodify cf,pf,af,zf,sf,of
1254 * @opflundef af
1255 * @opflclear of,cf
1256 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: 16/32/64-bit RMW form plus LOCK variant; AF left undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1264
1265
1266/**
1267 * @opcode 0x22
1268 * @opgroup og_gen_arith_bin
1269 * @opflmodify cf,pf,af,zf,sf,of
1270 * @opflundef af
1271 * @opflclear of,cf
1272 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: register destination, so no LOCK variant; AF left undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1279
1280
1281/**
1282 * @opcode 0x23
1283 * @opgroup og_gen_arith_bin
1284 * @opflmodify cf,pf,af,zf,sf,of
1285 * @opflundef af
1286 * @opflclear of,cf
1287 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: register destination form; AF left undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1294
1295
1296/**
1297 * @opcode 0x24
1298 * @opgroup og_gen_arith_bin
1299 * @opflmodify cf,pf,af,zf,sf,of
1300 * @opflundef af
1301 * @opflclear of,cf
1302 */
1303FNIEMOP_DEF(iemOp_and_Al_Ib)
1304{
1305 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1306 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1307 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1308}
1309
1310
1311/**
1312 * @opcode 0x25
1313 * @opgroup og_gen_arith_bin
1314 * @opflmodify cf,pf,af,zf,sf,of
1315 * @opflundef af
1316 * @opflclear of,cf
1317 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: operand-size sensitive; AF left undefined. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1324
1325
1326/**
1327 * @opcode 0x26
1328 * @opmnemonic SEG
1329 * @op1 ES
1330 * @opgroup og_prefix
1331 * @openc prefix
1332 * @opdisenum OP_SEG
1333 * @ophints harmless
1334 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1344
1345
1346/**
1347 * @opcode 0x27
1348 * @opfltest af,cf
1349 * @opflmodify cf,pf,af,zf,sf,of
1350 * @opflundef of
1351 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decode-time checks only, then defer to the C implementation.
       OF is architecturally undefined, hence the verification exclusion. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1360
1361
1362/**
1363 * @opcode 0x28
1364 * @opgroup og_gen_arith_bin
1365 * @opflmodify cf,pf,af,zf,sf,of
1366 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: RMW byte form plus LOCK variant. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1373
1374
1375/**
1376 * @opcode 0x29
1377 * @opgroup og_gen_arith_bin
1378 * @opflmodify cf,pf,af,zf,sf,of
1379 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: 16/32/64-bit RMW form plus LOCK variant. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1386
1387
1388/**
1389 * @opcode 0x2a
1390 * @opgroup og_gen_arith_bin
1391 * @opflmodify cf,pf,af,zf,sf,of
1392 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb: register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1398
1399
1400/**
1401 * @opcode 0x2b
1402 * @opgroup og_gen_arith_bin
1403 * @opflmodify cf,pf,af,zf,sf,of
1404 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev: register destination form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1410
1411
1412/**
1413 * @opcode 0x2c
1414 * @opgroup og_gen_arith_bin
1415 * @opflmodify cf,pf,af,zf,sf,of
1416 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib: fixed byte-sized form. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1422
1423
1424/**
1425 * @opcode 0x2d
1426 * @opgroup og_gen_arith_bin
1427 * @opflmodify cf,pf,af,zf,sf,of
1428 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz: operand-size sensitive. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1434
1435
1436/**
1437 * @opcode 0x2e
1438 * @opmnemonic SEG
1439 * @op1 CS
1440 * @opgroup og_prefix
1441 * @openc prefix
1442 * @opdisenum OP_SEG
1443 * @ophints harmless
1444 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1454
1455
1456/**
1457 * @opcode 0x2f
1458 * @opfltest af,cf
1459 * @opflmodify cf,pf,af,zf,sf,of
1460 * @opflundef of
1461 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decode-time checks only, then defer to the C implementation.
       OF is architecturally undefined, hence the verification exclusion. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1470
1471
1472/**
1473 * @opcode 0x30
1474 * @opgroup og_gen_arith_bin
1475 * @opflmodify cf,pf,af,zf,sf,of
1476 * @opflundef af
1477 * @opflclear of,cf
1478 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb: RMW byte form plus LOCK variant; AF left undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1486
1487
1488/**
1489 * @opcode 0x31
1490 * @opgroup og_gen_arith_bin
1491 * @opflmodify cf,pf,af,zf,sf,of
1492 * @opflundef af
1493 * @opflclear of,cf
1494 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv: 16/32/64-bit RMW form plus LOCK variant; AF left undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1502
1503
1504/**
1505 * @opcode 0x32
1506 * @opgroup og_gen_arith_bin
1507 * @opflmodify cf,pf,af,zf,sf,of
1508 * @opflundef af
1509 * @opflclear of,cf
1510 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb: register destination, so no LOCK variant; AF left undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1517
1518
1519/**
1520 * @opcode 0x33
1521 * @opgroup og_gen_arith_bin
1522 * @opflmodify cf,pf,af,zf,sf,of
1523 * @opflundef af
1524 * @opflclear of,cf
1525 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev: register destination form; AF left undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1532
1533
1534/**
1535 * @opcode 0x34
1536 * @opgroup og_gen_arith_bin
1537 * @opflmodify cf,pf,af,zf,sf,of
1538 * @opflundef af
1539 * @opflclear of,cf
1540 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib: fixed byte-sized form; AF left undefined. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1547
1548
1549/**
1550 * @opcode 0x35
1551 * @opgroup og_gen_arith_bin
1552 * @opflmodify cf,pf,af,zf,sf,of
1553 * @opflundef af
1554 * @opflclear of,cf
1555 */
1556FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1557{
1558 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1559 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1560 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1561}
1562
1563
1564/**
1565 * @opcode 0x36
1566 * @opmnemonic SEG
1567 * @op1 SS
1568 * @opgroup og_prefix
1569 * @openc prefix
1570 * @opdisenum OP_SEG
1571 * @ophints harmless
1572 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1582
1583
1584/**
1585 * @opcode 0x37
1586 * @opfltest af,cf
1587 * @opflmodify cf,pf,af,zf,sf,of
1588 * @opflundef pf,zf,sf,of
1589 * @opgroup og_gen_arith_dec
1590 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1591 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1592 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1593 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1594 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1595 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1596 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1597 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1598 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1599 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1600 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1601 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1602 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1603 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1604 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1605 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1606 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1607 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1608 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1609 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1611 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1612 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1613 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1614 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1615 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1617 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1618 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1619 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1620 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1621 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: decode-time checks only, then defer to the C implementation.
       OF is architecturally undefined, hence the verification exclusion. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1631
1632
1633/**
1634 * @opcode 0x38
1635 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb: read-only destination, so the LOCK prefix is rejected. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1642
1643
1644/**
1645 * @opcode 0x39
1646 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv: read-only destination (no LOCK variant emitted). */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1652
1653
1654/**
1655 * @opcode 0x3a
1656 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb: register "destination" form (only EFLAGS are updated). */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1662
1663
1664/**
1665 * @opcode 0x3b
1666 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev: trailing '0' where or/adc/etc. pass 1 — presumably marks
       that the destination register is NOT written (CMP only sets EFLAGS);
       confirm against the IEMOP_BODY_BINARY_rv_rm macro definition. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1672
1673
1674/**
1675 * @opcode 0x3c
1676 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib: fixed byte-sized form. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1682
1683
1684/**
1685 * @opcode 0x3d
1686 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz: operand-size sensitive; trailing '0' = destination not written. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1692
1693
1694/**
1695 * @opcode 0x3e
1696 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1706
1707
1708/**
1709 * @opcode 0x3f
1710 * @opfltest af,cf
1711 * @opflmodify cf,pf,af,zf,sf,of
1712 * @opflundef pf,zf,sf,of
1713 * @opgroup og_gen_arith_dec
1714 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1715 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1716 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1717 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1718 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1719 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1720 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1721 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1722 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1723 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1724 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1725 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1726 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1731 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1732 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1733 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1734 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1735 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1736 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1737 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1738 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1739 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1740 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1741 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1742 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1743 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1744 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1745 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1747 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1749 */
1750FNIEMOP_DEF(iemOp_aas)
1751{
1752 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1753 IEMOP_HLP_NO_64BIT();
1754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1755 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1756
1757 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1758}
1759
1760
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes.
 *
 * Only 16-bit and 32-bit operand sizes are handled (the default case is
 * unreachable); the 32-bit path explicitly clears bits 63:32 of the
 * referenced register via IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF.
 *
 * @param   a_fnNormalU16   The 16-bit worker (register pointer + EFLAGS pointer).
 * @param   a_fnNormalU32   The 32-bit worker (register pointer + EFLAGS pointer).
 * @param   a_iReg          The general register index (X86_GREG_XXX).
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1796
1797/**
1798 * @opcode 0x40
1799 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC on the 16/32-bit register. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1817
1818
1819/**
1820 * @opcode 0x41
1821 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode (0x41 = REX.B).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC on the 16/32-bit register. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1840
1841
1842/**
1843 * @opcode 0x42
1844 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode (0x42 = REX.X).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC on the 16/32-bit register. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1863
1864
1865
1866/**
1867 * @opcode 0x43
1868 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode (0x43 = REX.XB).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC on the 16/32-bit register. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1888
1889
1890/**
1891 * @opcode 0x44
1892 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode (0x44 = REX.R).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC on the 16/32-bit register. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1911
1912
1913/**
1914 * @opcode 0x45
1915 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode (0x45 = REX.RB).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC on the 16/32-bit register. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1935
1936
1937/**
1938 * @opcode 0x46
1939 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode (0x46 = REX.RX).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC on the 16/32-bit register. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1959
1960
1961/**
1962 * @opcode 0x47
1963 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode (0x47 = REX.RXB).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC on the 16/32-bit register. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1984
1985
1986/**
1987 * @opcode 0x48
1988 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode (0x48 = REX.W, so the effective
     * operand size must be recalculated).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain DEC on the 16/32-bit register. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2007
2008
2009/**
2010 * @opcode 0x49
 *
 * REX.WB prefix in 64-bit mode (64-bit operand size + ModR/M r/m / base
 * extension); DEC eCX elsewhere.
2011 */
2012FNIEMOP_DEF(iemOp_dec_eCX)
2013{
2014 /*
2015 * This is a REX prefix in 64-bit mode.
2016 */
2017 if (IEM_IS_64BIT_CODE(pVCpu))
2018 {
2019 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2020 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2021 pVCpu->iem.s.uRexB = 1 << 3; /* REX.B */
2022 iemRecalEffOpSize(pVCpu); /* account for REX.W */
2023
2024 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2025 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2026 }
2027
2028 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2029 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2030}
2031
2032
2033/**
2034 * @opcode 0x4a
 *
 * REX.WX prefix in 64-bit mode (64-bit operand size + SIB index
 * extension); DEC eDX elsewhere.
2035 */
2036FNIEMOP_DEF(iemOp_dec_eDX)
2037{
2038 /*
2039 * This is a REX prefix in 64-bit mode.
2040 */
2041 if (IEM_IS_64BIT_CODE(pVCpu))
2042 {
2043 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2044 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2045 pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */
2046 iemRecalEffOpSize(pVCpu); /* account for REX.W */
2047
2048 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2049 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2050 }
2051
2052 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2053 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2054}
2055
2056
2057/**
2058 * @opcode 0x4b
 *
 * REX.WXB prefix in 64-bit mode (64-bit operand size + SIB index and
 * ModR/M r/m / base extensions); DEC eBX elsewhere.
2059 */
2060FNIEMOP_DEF(iemOp_dec_eBX)
2061{
2062 /*
2063 * This is a REX prefix in 64-bit mode.
2064 */
2065 if (IEM_IS_64BIT_CODE(pVCpu))
2066 {
2067 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2068 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2069 pVCpu->iem.s.uRexB = 1 << 3; /* REX.B */
2070 pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */
2071 iemRecalEffOpSize(pVCpu); /* account for REX.W */
2072
2073 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2074 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2075 }
2076
2077 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2078 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2079}
2080
2081
2082/**
2083 * @opcode 0x4c
 *
 * REX.WR prefix in 64-bit mode (64-bit operand size + ModR/M reg
 * extension); DEC eSP elsewhere.
2084 */
2085FNIEMOP_DEF(iemOp_dec_eSP)
2086{
2087 /*
2088 * This is a REX prefix in 64-bit mode.
2089 */
2090 if (IEM_IS_64BIT_CODE(pVCpu))
2091 {
2092 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2093 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2094 pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R */
2095 iemRecalEffOpSize(pVCpu); /* account for REX.W */
2096
2097 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2098 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2099 }
2100
2101 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2102 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2103}
2104
2105
2106/**
2107 * @opcode 0x4d
 *
 * REX.WRB prefix in 64-bit mode (64-bit operand size + ModR/M reg and
 * r/m / base extensions); DEC eBP elsewhere.
2108 */
2109FNIEMOP_DEF(iemOp_dec_eBP)
2110{
2111 /*
2112 * This is a REX prefix in 64-bit mode.
2113 */
2114 if (IEM_IS_64BIT_CODE(pVCpu))
2115 {
2116 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2117 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2118 pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R */
2119 pVCpu->iem.s.uRexB = 1 << 3; /* REX.B */
2120 iemRecalEffOpSize(pVCpu); /* account for REX.W */
2121
2122 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2123 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2124 }
2125
2126 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2127 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2128}
2129
2130
2131/**
2132 * @opcode 0x4e
 *
 * REX.WRX prefix in 64-bit mode (64-bit operand size + ModR/M reg and
 * SIB index extensions); DEC eSI elsewhere.
2133 */
2134FNIEMOP_DEF(iemOp_dec_eSI)
2135{
2136 /*
2137 * This is a REX prefix in 64-bit mode.
2138 */
2139 if (IEM_IS_64BIT_CODE(pVCpu))
2140 {
2141 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2142 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2143 pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R */
2144 pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */
2145 iemRecalEffOpSize(pVCpu); /* account for REX.W */
2146
2147 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2148 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2149 }
2150
2151 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2152 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2153}
2154
2155
2156/**
2157 * @opcode 0x4f
 *
 * REX.WRXB prefix in 64-bit mode (all four REX extensions); DEC eDI
 * elsewhere.
2158 */
2159FNIEMOP_DEF(iemOp_dec_eDI)
2160{
2161 /*
2162 * This is a REX prefix in 64-bit mode.
2163 */
2164 if (IEM_IS_64BIT_CODE(pVCpu))
2165 {
2166 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2167 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2168 pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R */
2169 pVCpu->iem.s.uRexB = 1 << 3; /* REX.B */
2170 pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X */
2171 iemRecalEffOpSize(pVCpu); /* account for REX.W */
2172
2173 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2174 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2175 }
2176
2177 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2178 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2179}
2180
2181
2182/**
2183 * Common 'push register' helper.
 *
 * Fetches the given general register at the effective operand size and
 * pushes it onto the stack.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); OR'ed with
 *                  the REX.B extension in 64-bit mode.
2184 */
2185FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2186{
2187 if (IEM_IS_64BIT_CODE(pVCpu))
2188 {
2189 iReg |= pVCpu->iem.s.uRexB; /* REX.B selects r8..r15. */
 /* PUSH defaults to 64-bit operand size in long mode; only the 0x66
 prefix can reduce it to 16-bit (no 32-bit form). */
2190 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2191 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2192 }
2193
2194 switch (pVCpu->iem.s.enmEffOpSize)
2195 {
2196 case IEMMODE_16BIT:
2197 IEM_MC_BEGIN(0, 1);
2198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2199 IEM_MC_LOCAL(uint16_t, u16Value);
2200 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2201 IEM_MC_PUSH_U16(u16Value);
2202 IEM_MC_ADVANCE_RIP_AND_FINISH();
2203 IEM_MC_END();
2204 break;
2205
2206 case IEMMODE_32BIT:
2207 IEM_MC_BEGIN(0, 1);
2208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2209 IEM_MC_LOCAL(uint32_t, u32Value);
2210 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2211 IEM_MC_PUSH_U32(u32Value);
2212 IEM_MC_ADVANCE_RIP_AND_FINISH();
2213 IEM_MC_END();
2214 break;
2215
2216 case IEMMODE_64BIT:
2217 IEM_MC_BEGIN(0, 1);
2218 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2219 IEM_MC_LOCAL(uint64_t, u64Value);
2220 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2221 IEM_MC_PUSH_U64(u64Value);
2222 IEM_MC_ADVANCE_RIP_AND_FINISH();
2223 IEM_MC_END();
2224 break;
2225
2226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2227 }
2228}
2229
2230
2231/**
2232 * @opcode 0x50
 * PUSH rAX - thin wrapper around the common push-register helper.
2233 */
2234FNIEMOP_DEF(iemOp_push_eAX)
2235{
2236 IEMOP_MNEMONIC(push_rAX, "push rAX");
2237 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2238}
2239
2240
2241/**
2242 * @opcode 0x51
 * PUSH rCX.
2243 */
2244FNIEMOP_DEF(iemOp_push_eCX)
2245{
2246 IEMOP_MNEMONIC(push_rCX, "push rCX");
2247 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2248}
2249
2250
2251/**
2252 * @opcode 0x52
 * PUSH rDX.
2253 */
2254FNIEMOP_DEF(iemOp_push_eDX)
2255{
2256 IEMOP_MNEMONIC(push_rDX, "push rDX");
2257 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2258}
2259
2260
2261/**
2262 * @opcode 0x53
 * PUSH rBX.
2263 */
2264FNIEMOP_DEF(iemOp_push_eBX)
2265{
2266 IEMOP_MNEMONIC(push_rBX, "push rBX");
2267 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2268}
2269
2270
2271/**
2272 * @opcode 0x54
 * PUSH rSP.  On the 8086 target CPU the pushed value is SP minus 2
 * (i.e. SP after the decrement), unlike later CPUs which push the
 * pre-decrement value via the common helper.
2273 */
2274FNIEMOP_DEF(iemOp_push_eSP)
2275{
2276 IEMOP_MNEMONIC(push_rSP, "push rSP");
2277 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2278 {
2279 IEM_MC_BEGIN(0, 1);
2280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2281 IEM_MC_LOCAL(uint16_t, u16Value);
2282 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2283 IEM_MC_SUB_LOCAL_U16(u16Value, 2); /* 8086 quirk: pushes the decremented SP. */
2284 IEM_MC_PUSH_U16(u16Value);
2285 IEM_MC_ADVANCE_RIP_AND_FINISH();
2286 IEM_MC_END();
2287 }
2288 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2289}
2290
2291
2292/**
2293 * @opcode 0x55
 * PUSH rBP - thin wrapper around the common push-register helper.
2294 */
2295FNIEMOP_DEF(iemOp_push_eBP)
2296{
2297 IEMOP_MNEMONIC(push_rBP, "push rBP");
2298 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2299}
2300
2301
2302/**
2303 * @opcode 0x56
 * PUSH rSI.
2304 */
2305FNIEMOP_DEF(iemOp_push_eSI)
2306{
2307 IEMOP_MNEMONIC(push_rSI, "push rSI");
2308 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2309}
2310
2311
2312/**
2313 * @opcode 0x57
 * PUSH rDI.
2314 */
2315FNIEMOP_DEF(iemOp_push_eDI)
2316{
2317 IEMOP_MNEMONIC(push_rDI, "push rDI");
2318 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2319}
2320
2321
2322/**
2323 * Common 'pop register' helper.
 *
 * Pops a value of the effective operand size off the stack into the given
 * general register.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); OR'ed with
 *                  the REX.B extension in 64-bit mode.
2324 */
2325FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2326{
2327 if (IEM_IS_64BIT_CODE(pVCpu))
2328 {
2329 iReg |= pVCpu->iem.s.uRexB; /* REX.B selects r8..r15. */
 /* POP defaults to 64-bit operand size in long mode; only the 0x66
 prefix can reduce it to 16-bit (no 32-bit form). */
2330 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2331 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2332 }
2333
2334 switch (pVCpu->iem.s.enmEffOpSize)
2335 {
2336 case IEMMODE_16BIT:
2337 IEM_MC_BEGIN(0, 1);
2338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2339 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2340 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2341 IEM_MC_POP_U16(pu16Dst);
2342 IEM_MC_ADVANCE_RIP_AND_FINISH();
2343 IEM_MC_END();
2344 break;
2345
2346 case IEMMODE_32BIT:
2347 IEM_MC_BEGIN(0, 1);
2348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2349 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2350 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2351 IEM_MC_POP_U32(pu32Dst);
2352 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
2353 IEM_MC_ADVANCE_RIP_AND_FINISH();
2354 IEM_MC_END();
2355 break;
2356
2357 case IEMMODE_64BIT:
2358 IEM_MC_BEGIN(0, 1);
2359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2360 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2361 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2362 IEM_MC_POP_U64(pu64Dst);
2363 IEM_MC_ADVANCE_RIP_AND_FINISH();
2364 IEM_MC_END();
2365 break;
2366
2367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2368 }
2369}
2370
2371
2372/**
2373 * @opcode 0x58
 * POP rAX - thin wrapper around the common pop-register helper.
2374 */
2375FNIEMOP_DEF(iemOp_pop_eAX)
2376{
2377 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2378 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2379}
2380
2381
2382/**
2383 * @opcode 0x59
 * POP rCX.
2384 */
2385FNIEMOP_DEF(iemOp_pop_eCX)
2386{
2387 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2388 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2389}
2390
2391
2392/**
2393 * @opcode 0x5a
 * POP rDX.
2394 */
2395FNIEMOP_DEF(iemOp_pop_eDX)
2396{
2397 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2398 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2399}
2400
2401
2402/**
2403 * @opcode 0x5b
 * POP rBX.
2404 */
2405FNIEMOP_DEF(iemOp_pop_eBX)
2406{
2407 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2408 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2409}
2410
2411
2412/**
2413 * @opcode 0x5c
 * POP rSP.  Handled separately from the common helper because popping into
 * the stack pointer itself needs special treatment; with REX.B (pop r12)
 * the common helper is used instead.
2414 */
2415FNIEMOP_DEF(iemOp_pop_eSP)
2416{
2417 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2418 if (IEM_IS_64BIT_CODE(pVCpu))
2419 {
2420 if (pVCpu->iem.s.uRexB) /* REX.B -> actually pop r12, no special casing needed. */
2421 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
 /* Same default/effective operand size rules as the common pop helper. */
2422 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2423 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2424 }
2425
2426 /** @todo add testcase for this instruction. */
2427 switch (pVCpu->iem.s.enmEffOpSize)
2428 {
2429 case IEMMODE_16BIT:
2430 IEM_MC_BEGIN(0, 1);
2431 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2432 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2433 IEM_MC_LOCAL(uint16_t, u16Dst);
2434 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
2435 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2436 IEM_MC_ADVANCE_RIP_AND_FINISH();
2437 IEM_MC_END();
2438 break;
2439
2440 case IEMMODE_32BIT:
2441 IEM_MC_BEGIN(0, 1);
2442 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2443 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2444 IEM_MC_LOCAL(uint32_t, u32Dst);
2445 IEM_MC_POP_U32(&u32Dst);
2446 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2447 IEM_MC_ADVANCE_RIP_AND_FINISH();
2448 IEM_MC_END();
2449 break;
2450
2451 case IEMMODE_64BIT:
2452 IEM_MC_BEGIN(0, 1);
2453 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2454 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2455 IEM_MC_LOCAL(uint64_t, u64Dst);
2456 IEM_MC_POP_U64(&u64Dst);
2457 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2458 IEM_MC_ADVANCE_RIP_AND_FINISH();
2459 IEM_MC_END();
2460 break;
2461
2462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2463 }
2464}
2465
2466
2467/**
2468 * @opcode 0x5d
 * POP rBP - thin wrapper around the common pop-register helper.
2469 */
2470FNIEMOP_DEF(iemOp_pop_eBP)
2471{
2472 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2473 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2474}
2475
2476
2477/**
2478 * @opcode 0x5e
 * POP rSI.
2479 */
2480FNIEMOP_DEF(iemOp_pop_eSI)
2481{
2482 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2483 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2484}
2485
2486
2487/**
2488 * @opcode 0x5f
 * POP rDI.
2489 */
2490FNIEMOP_DEF(iemOp_pop_eDI)
2491{
2492 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2493 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2494}
2495
2496
2497/**
2498 * @opcode 0x60
 * PUSHA/PUSHAD (186+, invalid in 64-bit mode) - defers to a 16-bit or
 * 32-bit C implementation depending on the effective operand size.
2499 */
2500FNIEMOP_DEF(iemOp_pusha)
2501{
2502 IEMOP_MNEMONIC(pusha, "pusha");
2503 IEMOP_HLP_MIN_186();
2504 IEMOP_HLP_NO_64BIT();
2505 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2506 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
2507 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2508 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
2509}
2510
2511
2512/**
2513 * @opcode 0x61
 * POPA/POPAD outside 64-bit mode; in 64-bit mode this byte would be the
 * MVEX prefix, which is not supported and raises \#UD.
2514 */
2515FNIEMOP_DEF(iemOp_popa__mvex)
2516{
2517 if (!IEM_IS_64BIT_CODE(pVCpu))
2518 {
2519 IEMOP_MNEMONIC(popa, "popa");
2520 IEMOP_HLP_MIN_186();
2521 IEMOP_HLP_NO_64BIT();
2522 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2523 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
2524 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2525 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
2526 }
2527 IEMOP_MNEMONIC(mvex, "mvex");
2528 Log(("mvex prefix is not supported!\n"));
2529 IEMOP_RAISE_INVALID_OPCODE_RET();
2530}
2531
2532
2533/**
2534 * @opcode 0x62
2535 * @opmnemonic bound
2536 * @op1 Gv_RO
2537 * @op2 Ma
2538 * @opmincpu 80186
2539 * @ophints harmless x86_invalid_64
2540 * @optest op1=0 op2=0 ->
2541 * @optest op1=1 op2=0 -> value.xcpt=5
2542 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2543 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2544 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2545 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2546 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2547 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2548 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2549 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2550 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2551 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2555 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2564 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2565 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2567 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2568 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2569 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2570 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2571 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2572 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2573 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2577 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2584 */
2585FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
2586{
2587 /* The BOUND instruction is invalid 64-bit mode. In legacy and
2588 compatibility mode it is invalid with MOD=3.
2589
2590 In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
2591 both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
2592 given as R and X without an exact description, so we assume it builds on
2593 the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
2594 like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
2595 uint8_t bRm;
2596 if (!IEM_IS_64BIT_CODE(pVCpu))
2597 {
2598 IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2599 IEMOP_HLP_MIN_186();
2600 IEM_OPCODE_GET_NEXT_U8(&bRm);
2601 if (IEM_IS_MODRM_MEM_MODE(bRm))
2602 {
2603 /** @todo testcase: check that there are two memory accesses involved. Check
2604 * whether they're both read before the \#BR triggers. */
2605 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2606 {
2607 IEM_MC_BEGIN(3, 1);
2608 IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2609 IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
2610 IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
2611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2612
2613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2615
 /* Lower bound at [mem], upper bound 2 bytes further on. */
2616 IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
2617 IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2618 IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
2619
2620 IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
2621 IEM_MC_END();
2622 }
2623 else /* 32-bit operands */
2624 {
2625 IEM_MC_BEGIN(3, 1);
2626 IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2627 IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
2628 IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
2629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2630
2631 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2633
 /* Lower bound at [mem], upper bound 4 bytes further on. */
2634 IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
2635 IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2636 IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
2637
2638 IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
2639 IEM_MC_END();
2640 }
2641 }
2642
2643 /*
2644 * @opdone
2645 */
 /* MOD=3 outside 64-bit mode: only valid as an EVEX prefix, and only
 when the guest CPU reports AVX-512 foundation support. */
2646 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2647 {
2648 /* Note that there is no need for the CPU to fetch further bytes
2649 here because MODRM.MOD == 3. */
2650 Log(("evex not supported by the guest CPU!\n"));
2651 IEMOP_RAISE_INVALID_OPCODE_RET();
2652 }
2653 }
2654 else
2655 {
2656 /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
2657 * does modr/m read, whereas AMD probably doesn't... */
2658 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2659 {
2660 Log(("evex not supported by the guest CPU!\n"));
2661 return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
2662 }
2663 IEM_OPCODE_GET_NEXT_U8(&bRm);
2664 }
2665
 /* EVEX prefix decoding (two more payload bytes) is not implemented yet. */
2666 IEMOP_MNEMONIC(evex, "evex");
2667 uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
2668 uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
2669 Log(("evex prefix is not implemented!\n"));
2670 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2671}
2672
2673
2674/** Opcode 0x63 - non-64-bit modes: ARPL Ew,Gw (286+, protected mode only).
 * Adjusts the RPL field of the destination selector; register and memory
 * destination forms, the latter via a read-modify-write mapping. */
2675FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
2676{
2677 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
2678 IEMOP_HLP_MIN_286();
2679 IEMOP_HLP_NO_REAL_OR_V86_MODE();
2680 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2681
2682 if (IEM_IS_MODRM_REG_MODE(bRm))
2683 {
2684 /* Register */
2685 IEM_MC_BEGIN(3, 0);
2686 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2687 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2688 IEM_MC_ARG(uint16_t, u16Src, 1);
2689 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2690
2691 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2692 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
2693 IEM_MC_REF_EFLAGS(pEFlags);
2694 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2695
2696 IEM_MC_ADVANCE_RIP_AND_FINISH();
2697 IEM_MC_END();
2698 }
2699 else
2700 {
2701 /* Memory */
2702 IEM_MC_BEGIN(3, 3);
2703 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2704 IEM_MC_ARG(uint16_t, u16Src, 1);
2705 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
2706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2707 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
2708
2709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2710 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
 /* Map the destination word read/write, modify in place, then commit. */
2711 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2712 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2713 IEM_MC_FETCH_EFLAGS(EFlags);
2714 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2715
2716 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
2717 IEM_MC_COMMIT_EFLAGS(EFlags);
2718 IEM_MC_ADVANCE_RIP_AND_FINISH();
2719 IEM_MC_END();
2720 }
2721}
2722
2723
2724/**
2725 * @opcode 0x63
2726 *
2727 * @note This is a weird one. It works like a regular move instruction if
2728 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2729 * @todo This definitely needs a testcase to verify the odd cases. */
2730FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2731{
2732 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
2733
2734 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2736
2737 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2738 {
2739 if (IEM_IS_MODRM_REG_MODE(bRm))
2740 {
2741 /*
2742 * Register to register.
2743 */
2744 IEM_MC_BEGIN(0, 1);
2745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2746 IEM_MC_LOCAL(uint64_t, u64Value);
2747 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); /* sign-extend 32 -> 64 */
2748 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2749 IEM_MC_ADVANCE_RIP_AND_FINISH();
2750 IEM_MC_END();
2751 }
2752 else
2753 {
2754 /*
2755 * We're loading a register from memory.
2756 */
2757 IEM_MC_BEGIN(0, 2);
2758 IEM_MC_LOCAL(uint64_t, u64Value);
2759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2762 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2763 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2764 IEM_MC_ADVANCE_RIP_AND_FINISH();
2765 IEM_MC_END();
2766 }
2767 }
2768 else
 /* The plain-move form without REX.W (see @note above) is not
 implemented yet. */
2769 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2770}
2771
2772
2773/**
2774 * @opcode 0x64
2775 * @opmnemonic segfs
2776 * @opmincpu 80386
2777 * @opgroup og_prefixes
 *
 * FS segment-override prefix: records the prefix flag, sets the effective
 * segment to FS and re-dispatches on the next opcode byte.
2778 */
2779FNIEMOP_DEF(iemOp_seg_FS)
2780{
2781 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2782 IEMOP_HLP_MIN_386();
2783
2784 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2785 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2786
2787 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2788 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2789}
2790
2791
2792/**
2793 * @opcode 0x65
2794 * @opmnemonic seggs
2795 * @opmincpu 80386
2796 * @opgroup og_prefixes
 *
 * GS segment-override prefix: records the prefix flag, sets the effective
 * segment to GS and re-dispatches on the next opcode byte.
2797 */
2798FNIEMOP_DEF(iemOp_seg_GS)
2799{
2800 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2801 IEMOP_HLP_MIN_386();
2802
2803 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2804 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2805
2806 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2807 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2808}
2809
2810
2811/**
2812 * @opcode 0x66
2813 * @opmnemonic opsize
2814 * @openc prefix
2815 * @opmincpu 80386
2816 * @ophints harmless
2817 * @opgroup og_prefixes
 *
 * Operand-size override prefix: records the prefix, recalculates the
 * effective operand size and re-dispatches on the next opcode byte.
2818 */
2819FNIEMOP_DEF(iemOp_op_size)
2820{
2821 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2822 IEMOP_HLP_MIN_386();
2823
2824 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2825 iemRecalEffOpSize(pVCpu);
2826
2827 /* For the 4 entry opcode tables, the operand prefix doesn't count
2828 when REPZ or REPNZ are present. */
2829 if (pVCpu->iem.s.idxPrefix == 0)
2830 pVCpu->iem.s.idxPrefix = 1;
2831
2832 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2833 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2834}
2835
2836
2837/**
2838 * @opcode 0x67
2839 * @opmnemonic addrsize
2840 * @openc prefix
2841 * @opmincpu 80386
2842 * @ophints harmless
2843 * @opgroup og_prefixes
 *
 * Address-size override prefix: toggles the effective address mode based
 * on the default mode and re-dispatches on the next opcode byte.
2844 */
2845FNIEMOP_DEF(iemOp_addr_size)
2846{
2847 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2848 IEMOP_HLP_MIN_386();
2849
2850 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2851 switch (pVCpu->iem.s.enmDefAddrMode)
2852 {
2853 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2854 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2855 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break; /* 64-bit mode: 0x67 gives 32-bit addressing. */
2856 default: AssertFailed();
2857 }
2858
2859 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2860 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2861}
2862
2863
2864/**
2865 * @opcode 0x68
 * PUSH Iz (186+) - pushes an immediate of the effective operand size;
 * in 64-bit mode the imm32 is sign-extended to 64 bits.
2866 */
2867FNIEMOP_DEF(iemOp_push_Iz)
2868{
2869 IEMOP_MNEMONIC(push_Iz, "push Iz");
2870 IEMOP_HLP_MIN_186();
2871 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2872 switch (pVCpu->iem.s.enmEffOpSize)
2873 {
2874 case IEMMODE_16BIT:
2875 {
2876 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2877 IEM_MC_BEGIN(0,0);
2878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2879 IEM_MC_PUSH_U16(u16Imm);
2880 IEM_MC_ADVANCE_RIP_AND_FINISH();
2881 IEM_MC_END();
2882 break;
2883 }
2884
2885 case IEMMODE_32BIT:
2886 {
2887 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2888 IEM_MC_BEGIN(0,0);
2889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2890 IEM_MC_PUSH_U32(u32Imm);
2891 IEM_MC_ADVANCE_RIP_AND_FINISH();
2892 IEM_MC_END();
2893 break;
2894 }
2895
2896 case IEMMODE_64BIT:
2897 {
2898 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
2899 IEM_MC_BEGIN(0,0);
2900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2901 IEM_MC_PUSH_U64(u64Imm);
2902 IEM_MC_ADVANCE_RIP_AND_FINISH();
2903 IEM_MC_END();
2904 break;
2905 }
2906
2907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2908 }
2909}
2910
2911
2912/**
2913 * @opcode 0x69
 * IMUL Gv,Ev,Iz (186+) - three-operand signed multiply: Gv = Ev * Iz.
 * The multiply is done in a local temporary which is then stored to the
 * destination register; register and memory source forms for each of the
 * 16/32/64-bit operand sizes.
2914 */
2915FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2916{
2917 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2918 IEMOP_HLP_MIN_186();
2919 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2920 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2921
2922 switch (pVCpu->iem.s.enmEffOpSize)
2923 {
2924 case IEMMODE_16BIT:
2925 {
2926 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2927 if (IEM_IS_MODRM_REG_MODE(bRm))
2928 {
2929 /* register operand */
2930 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2931 IEM_MC_BEGIN(3, 1);
2932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2933 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2934 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2935 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2936 IEM_MC_LOCAL(uint16_t, u16Tmp);
2937
2938 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2939 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp); /* multiply into the temporary... */
2940 IEM_MC_REF_EFLAGS(pEFlags);
2941 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2942 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); /* ...then store to Gv. */
2943
2944 IEM_MC_ADVANCE_RIP_AND_FINISH();
2945 IEM_MC_END();
2946 }
2947 else
2948 {
2949 /* memory operand */
2950 IEM_MC_BEGIN(3, 2);
2951 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2952 IEM_MC_ARG(uint16_t, u16Src, 1);
2953 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2954 IEM_MC_LOCAL(uint16_t, u16Tmp);
2955 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2956
2957 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = trailing imm16 size */
2958 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2959 IEM_MC_ASSIGN(u16Src, u16Imm);
2960 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2961 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2962 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2963 IEM_MC_REF_EFLAGS(pEFlags);
2964 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2965 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2966
2967 IEM_MC_ADVANCE_RIP_AND_FINISH();
2968 IEM_MC_END();
2969 }
2970 break;
2971 }
2972
2973 case IEMMODE_32BIT:
2974 {
2975 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2976 if (IEM_IS_MODRM_REG_MODE(bRm))
2977 {
2978 /* register operand */
2979 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2980 IEM_MC_BEGIN(3, 1);
2981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2982 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2983 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2984 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2985 IEM_MC_LOCAL(uint32_t, u32Tmp);
2986
2987 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2988 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2989 IEM_MC_REF_EFLAGS(pEFlags);
2990 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2991 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2992
2993 IEM_MC_ADVANCE_RIP_AND_FINISH();
2994 IEM_MC_END();
2995 }
2996 else
2997 {
2998 /* memory operand */
2999 IEM_MC_BEGIN(3, 2);
3000 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3001 IEM_MC_ARG(uint32_t, u32Src, 1);
3002 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3003 IEM_MC_LOCAL(uint32_t, u32Tmp);
3004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3005
3006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = trailing imm32 size */
3007 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3008 IEM_MC_ASSIGN(u32Src, u32Imm);
3009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3010 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3011 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3012 IEM_MC_REF_EFLAGS(pEFlags);
3013 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3014 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3015
3016 IEM_MC_ADVANCE_RIP_AND_FINISH();
3017 IEM_MC_END();
3018 }
3019 break;
3020 }
3021
3022 case IEMMODE_64BIT:
3023 {
3024 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3025 if (IEM_IS_MODRM_REG_MODE(bRm))
3026 {
3027 /* register operand */
3028 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
3029 IEM_MC_BEGIN(3, 1);
3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3031 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3032 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
3033 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3034 IEM_MC_LOCAL(uint64_t, u64Tmp);
3035
3036 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3037 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3038 IEM_MC_REF_EFLAGS(pEFlags);
3039 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3040 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3041
3042 IEM_MC_ADVANCE_RIP_AND_FINISH();
3043 IEM_MC_END();
3044 }
3045 else
3046 {
3047 /* memory operand */
3048 IEM_MC_BEGIN(3, 2);
3049 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3050 IEM_MC_ARG(uint64_t, u64Src, 1);
3051 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3052 IEM_MC_LOCAL(uint64_t, u64Tmp);
3053 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3054
3055 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = trailing imm32 size */
3056 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3057 IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
3058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3059 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3060 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3061 IEM_MC_REF_EFLAGS(pEFlags);
3062 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3063 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3064
3065 IEM_MC_ADVANCE_RIP_AND_FINISH();
3066 IEM_MC_END();
3067 }
3068 break;
3069 }
3070
3071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3072 }
3073}
3074
3075
/**
 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate (186+ instruction).
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* fetched as signed so the pushes below sign-extend */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack ops default to 64-bit operand size in long mode */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(i8Imm); /* i8Imm sign-extends to the push width */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* expands to an asserting 'default:' */
    }
}
3112
3113
/**
 * @opcode 0x6b
 *
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate (186+).  The worker is a two-operand binary helper, so the Ev
 * value is fetched into a local, multiplied in place, and the result is then
 * stored to the Gv (ModRM.reg) register.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); /* IMUL leaves these undefined */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Worker selected per guest-CPU EFLAGS behavior profile. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1); /* (int8_t) cast sign-extends Ib */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp); /* multiply happens in the local, not the source reg */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp); /* result goes to Gv */

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint16_t,    u16Src,     1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_LOCAL(uint16_t,  u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte following the ModRM bytes */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp); /* also zeroes the high dword in 64-bit mode */

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint32_t,    u32Src,     1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_LOCAL(uint32_t,  u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint64_t,    u64Src,     1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_LOCAL(uint64_t,  u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);  /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm);         /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3276
3277
/**
 * @opcode 0x6c
 *
 * INS Yb,DX - input byte(s) from port DX to ES:[e/rDI]; the REP/REPNE
 * prefixed form is dispatched to a separate string-repeat implementation.
 * All variants defer to C implementations (I/O, possible VM-exits).
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* both prefixes act as REP here */
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode) /* address size selects the DI register width */
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3314
3315
/**
 * @opcode 0x6d
 *
 * INS Yv,DX - word/dword input from port DX.  Dispatches on both effective
 * operand size and effective address size; the 64-bit operand size case
 * falls through to the 32-bit implementation (port I/O is at most 32-bit).
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ)) /* both prefixes act as REP here */
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size is handled as 32-bit */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size is handled as 32-bit */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3396
3397
/**
 * @opcode 0x6e
 *
 * OUTS DX,Yb - output byte(s) from [iEffSeg]:[e/rSI] to port DX; unlike INS
 * the source segment is overridable, so it is passed to the C implementation.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) /* both prefixes act as REP here */
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode) /* address size selects the SI register width */
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3440
3441
/**
 * @opcode 0x6f
 *
 * OUTS DX,Yv - word/dword output to port DX.  Dispatches on both effective
 * operand size and effective address size; the 64-bit operand size case
 * falls through to the 32-bit implementation (port I/O is at most 32-bit).
 * The effective source segment is passed on since it is overridable.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ)) /* both prefixes act as REP here */
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size is handled as 32-bit */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size is handled as 32-bit */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3528
3529
/**
 * @opcode 0x70
 *
 * JO rel8 - jump short if the overflow flag (OF) is set.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3548
3549
/**
 * @opcode 0x71
 *
 * JNO rel8 - jump short if the overflow flag (OF) is clear.  Tests the set
 * condition and places the jump in the ELSE arm (file convention for the
 * negated Jcc forms).
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3568
/**
 * @opcode 0x72
 *
 * JC/JB/JNAE rel8 - jump short if the carry flag (CF) is set.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3587
3588
/**
 * @opcode 0x73
 *
 * JNC/JNB/JAE rel8 - jump short if the carry flag (CF) is clear.  Tests the
 * set condition and places the jump in the ELSE arm (file convention).
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3607
3608
/**
 * @opcode 0x74
 *
 * JE/JZ rel8 - jump short if the zero flag (ZF) is set.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3627
3628
/**
 * @opcode 0x75
 *
 * JNE/JNZ rel8 - jump short if the zero flag (ZF) is clear.  Tests the set
 * condition and places the jump in the ELSE arm (file convention).
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3647
3648
/**
 * @opcode 0x76
 *
 * JBE/JNA rel8 - jump short if CF or ZF is set (unsigned below-or-equal).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3667
3668
/**
 * @opcode 0x77
 *
 * JA/JNBE rel8 - jump short if both CF and ZF are clear (unsigned above).
 * Tests the set condition and places the jump in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3687
3688
/**
 * @opcode 0x78
 *
 * JS rel8 - jump short if the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3707
3708
/**
 * @opcode 0x79
 *
 * JNS rel8 - jump short if the sign flag (SF) is clear.  Tests the set
 * condition and places the jump in the ELSE arm (file convention).
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3727
3728
/**
 * @opcode 0x7a
 *
 * JP/JPE rel8 - jump short if the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3747
3748
/**
 * @opcode 0x7b
 *
 * JNP/JPO rel8 - jump short if the parity flag (PF) is clear.  Tests the set
 * condition and places the jump in the ELSE arm (file convention).
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3767
3768
/**
 * @opcode 0x7c
 *
 * JL/JNGE rel8 - jump short if SF != OF (signed less-than).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3787
3788
/**
 * @opcode 0x7d
 *
 * JGE/JNL rel8 - jump short if SF == OF (signed greater-or-equal).  Tests the
 * SF != OF condition and places the jump in the ELSE arm (file convention).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3807
3808
/**
 * @opcode 0x7e
 *
 * JLE/JNG rel8 - jump short if ZF is set or SF != OF (signed less-or-equal).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3827
3828
/**
 * @opcode 0x7f
 *
 * JG/JNLE rel8 - jump short if ZF is clear and SF == OF (signed greater).
 * Tests the JLE condition and places the jump in the ELSE arm.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH(); /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3847
3848
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Handles the register target and the non-LOCKed memory target.  NOTE: the
 * expansion deliberately ends inside an open 'else' scope (the LOCK-prefixed
 * memory case); the IEMOP_BODY_BINARY_Eb_Ib_LOCKED macro that must follow it
 * supplies that path and closes the scopes.
 *
 * @param   a_fnNormalU8    Worker called as (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); /* operates directly on the register */ \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing immediate byte */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); /* map guest byte read/write */ \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3899
/**
 * Companion to IEMOP_BODY_BINARY_Eb_Ib_RW: supplies the LOCK-prefixed memory
 * path and closes the 'else' scopes that macro left open.
 *
 * @param   a_fnLockedU8    Atomic worker called as (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing immediate byte */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEMOP_HLP_DONE_DECODING(); \
        \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
        } \
    } \
    (void)0
3923
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for CMP: the destination is
 * mapped read-only and never written back, only EFLAGS are updated.  Like the
 * RW variant it ends inside an open 'else' scope, closed here by the
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK macro (CMP does not allow LOCK).
 *
 * @param   a_fnNormalU8    Worker called as (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); /* const: read-only mapping */ \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing immediate byte */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3970
/**
 * Companion to IEMOP_BODY_BINARY_Eb_Ib_RO: raises \#UD for a LOCK-prefixed
 * memory operand and closes the scopes that macro left open.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3977
3978
3979
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 *
 * ADD Eb,Ib - the two body macros form one statement pair: normal path plus
 * the LOCK-prefixed atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
3990
3991
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 *
 * OR Eb,Ib - normal path plus LOCK-prefixed atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
4002
4003
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 *
 * ADC Eb,Ib - normal path plus LOCK-prefixed atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4014
4015
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 *
 * SBB Eb,Ib - normal path plus LOCK-prefixed atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4026
4027
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 *
 * AND Eb,Ib - normal path plus LOCK-prefixed atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4038
4039
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 *
 * SUB Eb,Ib - normal path plus LOCK-prefixed atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4050
4051
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 *
 * XOR Eb,Ib - normal path plus LOCK-prefixed atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4062
4063
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 *
 * CMP Eb,Ib - read-only destination (only EFLAGS change); a LOCK prefix
 * raises \#UD, hence the NO_LOCK closer instead of the LOCKED one.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4074
4075
/**
 * @opcode 0x80
 *
 * Group 1 byte-immediate dispatcher: selects the worker by the ModRM.reg
 * field (/0../7) and forwards the already-fetched ModRM byte.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm)) /* reg field 0..7 -> instruction */
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4095
4096
4097/**
4098 * Body for a group 1 binary operator.
4099 */
4100#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4101 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4102 { \
4103 /* register target */ \
4104 switch (pVCpu->iem.s.enmEffOpSize) \
4105 { \
4106 case IEMMODE_16BIT: \
4107 { \
4108 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4109 IEM_MC_BEGIN(3, 0); \
4110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4111 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4112 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4113 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4114 \
4115 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4116 IEM_MC_REF_EFLAGS(pEFlags); \
4117 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4118 \
4119 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4120 IEM_MC_END(); \
4121 break; \
4122 } \
4123 \
4124 case IEMMODE_32BIT: \
4125 { \
4126 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4127 IEM_MC_BEGIN(3, 0); \
4128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4129 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4130 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4131 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4132 \
4133 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4134 IEM_MC_REF_EFLAGS(pEFlags); \
4135 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4136 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4137 \
4138 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4139 IEM_MC_END(); \
4140 break; \
4141 } \
4142 \
4143 case IEMMODE_64BIT: \
4144 { \
4145 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4146 IEM_MC_BEGIN(3, 0); \
4147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4148 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4149 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4150 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4151 \
4152 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4153 IEM_MC_REF_EFLAGS(pEFlags); \
4154 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4155 \
4156 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4157 IEM_MC_END(); \
4158 break; \
4159 } \
4160 \
4161 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4162 } \
4163 } \
4164 else \
4165 { \
4166 /* memory target */ \
4167 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4168 { \
4169 switch (pVCpu->iem.s.enmEffOpSize) \
4170 { \
4171 case IEMMODE_16BIT: \
4172 { \
4173 IEM_MC_BEGIN(3, 3); \
4174 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4175 IEM_MC_ARG(uint16_t, u16Src, 1); \
4176 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4178 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4179 \
4180 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4181 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4182 IEM_MC_ASSIGN(u16Src, u16Imm); \
4183 IEMOP_HLP_DONE_DECODING(); \
4184 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4185 IEM_MC_FETCH_EFLAGS(EFlags); \
4186 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4187 \
4188 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4189 IEM_MC_COMMIT_EFLAGS(EFlags); \
4190 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4191 IEM_MC_END(); \
4192 break; \
4193 } \
4194 \
4195 case IEMMODE_32BIT: \
4196 { \
4197 IEM_MC_BEGIN(3, 3); \
4198 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4199 IEM_MC_ARG(uint32_t, u32Src, 1); \
4200 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4202 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4203 \
4204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4205 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4206 IEM_MC_ASSIGN(u32Src, u32Imm); \
4207 IEMOP_HLP_DONE_DECODING(); \
4208 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4209 IEM_MC_FETCH_EFLAGS(EFlags); \
4210 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4211 \
4212 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4213 IEM_MC_COMMIT_EFLAGS(EFlags); \
4214 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4215 IEM_MC_END(); \
4216 break; \
4217 } \
4218 \
4219 case IEMMODE_64BIT: \
4220 { \
4221 IEM_MC_BEGIN(3, 3); \
4222 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4223 IEM_MC_ARG(uint64_t, u64Src, 1); \
4224 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4225 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4226 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4227 \
4228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4229 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4230 IEMOP_HLP_DONE_DECODING(); \
4231 IEM_MC_ASSIGN(u64Src, u64Imm); \
4232 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4233 IEM_MC_FETCH_EFLAGS(EFlags); \
4234 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4235 \
4236 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4237 IEM_MC_COMMIT_EFLAGS(EFlags); \
4238 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4239 IEM_MC_END(); \
4240 break; \
4241 } \
4242 \
4243 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4244 } \
4245 } \
4246 else \
4247 { \
4248 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py.
 * It supplies the LOCK-prefixed memory-target code (a_fnLockedUxx) and closes
 * the scopes left open by IEMOP_BODY_BINARY_Ev_Iz_RW, so it must directly
 * follow that macro's invocation. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                    IEM_MC_ARG(uint16_t,        u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                    IEM_MC_ARG(uint32_t,        u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                    IEM_MC_ARG(uint64_t,        u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4330
/* Read-only version: the destination is only read (used by CMP), so the
 * memory operand is mapped read-only and a LOCK prefix is rejected via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX_RET.  Unlike the _RW variant this macro is
 * self-contained (no _LOCKED continuation needed). */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,               0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,               0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,               0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,               2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,        u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,        u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,        u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4484
4485
4486/**
4487 * @opmaps grp1_81
4488 * @opcode /0
4489 */
4490FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4491{
4492 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4493 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4494 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4495}
4496
4497
4498/**
4499 * @opmaps grp1_81
4500 * @opcode /1
4501 */
4502FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4503{
4504 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4505 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4506 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4507}
4508
4509
4510/**
4511 * @opmaps grp1_81
4512 * @opcode /2
4513 */
4514FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4515{
4516 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4517 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4518 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4519}
4520
4521
4522/**
4523 * @opmaps grp1_81
4524 * @opcode /3
4525 */
4526FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4527{
4528 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4529 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4530 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4531}
4532
4533
4534/**
4535 * @opmaps grp1_81
4536 * @opcode /4
4537 */
4538FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4539{
4540 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4541 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4542 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4543}
4544
4545
4546/**
4547 * @opmaps grp1_81
4548 * @opcode /5
4549 */
4550FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4551{
4552 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4553 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4554 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4555}
4556
4557
4558/**
4559 * @opmaps grp1_81
4560 * @opcode /6
4561 */
4562FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4563{
4564 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4565 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4566 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4567}
4568
4569
4570/**
4571 * @opmaps grp1_81
4572 * @opcode /7
4573 */
4574FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4575{
4576 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4577 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4578}
4579
4580
4581/**
4582 * @opcode 0x81
4583 */
4584FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4585{
4586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4587 switch (IEM_GET_MODRM_REG_8(bRm))
4588 {
4589 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4590 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4591 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4592 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4593 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4594 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4595 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4596 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4598 }
4599}
4600
4601
4602/**
4603 * @opcode 0x82
4604 * @opmnemonic grp1_82
4605 * @opgroup og_groups
4606 */
4607FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4608{
4609 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4610 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4611}
4612
4613
4614/**
4615 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4616 * iemOp_Grp1_Ev_Ib.
4617 */
4618#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4619 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4620 { \
4621 /* \
4622 * Register target \
4623 */ \
4624 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4625 switch (pVCpu->iem.s.enmEffOpSize) \
4626 { \
4627 case IEMMODE_16BIT: \
4628 { \
4629 IEM_MC_BEGIN(3, 0); \
4630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4631 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4632 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4633 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4634 \
4635 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4636 IEM_MC_REF_EFLAGS(pEFlags); \
4637 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4638 \
4639 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4640 IEM_MC_END(); \
4641 break; \
4642 } \
4643 \
4644 case IEMMODE_32BIT: \
4645 { \
4646 IEM_MC_BEGIN(3, 0); \
4647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4648 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4649 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4650 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4651 \
4652 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4653 IEM_MC_REF_EFLAGS(pEFlags); \
4654 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4655 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4656 \
4657 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4658 IEM_MC_END(); \
4659 break; \
4660 } \
4661 \
4662 case IEMMODE_64BIT: \
4663 { \
4664 IEM_MC_BEGIN(3, 0); \
4665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4666 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4667 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4668 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4669 \
4670 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4671 IEM_MC_REF_EFLAGS(pEFlags); \
4672 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4673 \
4674 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4675 IEM_MC_END(); \
4676 break; \
4677 } \
4678 \
4679 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4680 } \
4681 } \
4682 else \
4683 { \
4684 /* \
4685 * Memory target. \
4686 */ \
4687 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4688 { \
4689 switch (pVCpu->iem.s.enmEffOpSize) \
4690 { \
4691 case IEMMODE_16BIT: \
4692 { \
4693 IEM_MC_BEGIN(3, 3); \
4694 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4695 IEM_MC_ARG(uint16_t, u16Src, 1); \
4696 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4697 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4698 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4699 \
4700 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4701 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4702 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4703 IEMOP_HLP_DONE_DECODING(); \
4704 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4705 IEM_MC_FETCH_EFLAGS(EFlags); \
4706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4707 \
4708 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4709 IEM_MC_COMMIT_EFLAGS(EFlags); \
4710 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4711 IEM_MC_END(); \
4712 break; \
4713 } \
4714 \
4715 case IEMMODE_32BIT: \
4716 { \
4717 IEM_MC_BEGIN(3, 3); \
4718 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4719 IEM_MC_ARG(uint32_t, u32Src, 1); \
4720 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4721 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4722 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4723 \
4724 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4725 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4726 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4727 IEMOP_HLP_DONE_DECODING(); \
4728 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4729 IEM_MC_FETCH_EFLAGS(EFlags); \
4730 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4731 \
4732 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4733 IEM_MC_COMMIT_EFLAGS(EFlags); \
4734 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4735 IEM_MC_END(); \
4736 break; \
4737 } \
4738 \
4739 case IEMMODE_64BIT: \
4740 { \
4741 IEM_MC_BEGIN(3, 3); \
4742 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4743 IEM_MC_ARG(uint64_t, u64Src, 1); \
4744 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4746 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4747 \
4748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4749 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4750 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4751 IEMOP_HLP_DONE_DECODING(); \
4752 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4753 IEM_MC_FETCH_EFLAGS(EFlags); \
4754 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4755 \
4756 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4757 IEM_MC_COMMIT_EFLAGS(EFlags); \
4758 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4759 IEM_MC_END(); \
4760 break; \
4761 } \
4762 \
4763 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4764 } \
4765 } \
4766 else \
4767 { \
4768 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py.
 * It supplies the LOCK-prefixed memory-target code (a_fnLockedUxx) and closes
 * the scopes left open by IEMOP_BODY_BINARY_Ev_Ib_RW, so it must directly
 * follow that macro's invocation. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                    IEM_MC_ARG(uint16_t,        u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                    IEM_MC_ARG(uint32_t,        u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                    IEM_MC_ARG(uint64_t,        u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4850
/* Read-only variant: the destination is only read (used by CMP), so the
 * memory operand is mapped read-only and a LOCK prefix is rejected via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX_RET.  Unlike the _RW variant this macro is
 * self-contained (no _LOCKED continuation needed). */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,        u16Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,        u32Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,        u64Src,            1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5006
5007/**
5008 * @opmaps grp1_83
5009 * @opcode /0
5010 */
5011FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5012{
5013 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5014 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
5015 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
5016}
5017
5018
5019/**
5020 * @opmaps grp1_83
5021 * @opcode /1
5022 */
5023FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5024{
5025 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5026 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
5027 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
5028}
5029
5030
5031/**
5032 * @opmaps grp1_83
5033 * @opcode /2
5034 */
5035FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5036{
5037 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5038 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5039 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5040}
5041
5042
5043/**
5044 * @opmaps grp1_83
5045 * @opcode /3
5046 */
5047FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5048{
5049 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5050 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5051 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5052}
5053
5054
5055/**
5056 * @opmaps grp1_83
5057 * @opcode /4
5058 */
5059FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5060{
5061 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5062 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5063 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5064}
5065
5066
5067/**
5068 * @opmaps grp1_83
5069 * @opcode /5
5070 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* 0x83 /5: SUB Ev,Ib (Ib sign-extended) - plain and LOCK'ed bodies. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5077
5078
5079/**
5080 * @opmaps grp1_83
5081 * @opcode /6
5082 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* 0x83 /6: XOR Ev,Ib (Ib sign-extended) - plain and LOCK'ed bodies. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5089
5090
5091/**
5092 * @opmaps grp1_83
5093 * @opcode /7
5094 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* 0x83 /7: CMP only reads Ev (flags-only result), so the read-only body
       is used and there is no LOCK'ed variant. */
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5100
5101
5102/**
5103 * @opcode 0x83
5104 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    /* Group-1 dispatcher: the ModRM reg field (/0../7) selects the actual
       operation; each case fully decodes and executes the sub-instruction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5124
5125
5126/**
5127 * @opcode 0x84
5128 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is architecturally undefined after TEST, so exclude it from
       verification comparisons. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* TEST only reads both operands (flags-only result): read-only body,
       and a LOCK prefix is rejected by the NO_LOCK body. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5136
5137
5138/**
5139 * @opcode 0x85
5140 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is architecturally undefined after TEST. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Flags-only result: read-only body picks the worker by operand size. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5147
5148
5149/**
5150 * @opcode 0x86
5151 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: swap via two temporaries. */
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(2, 4);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_LOCAL(uint8_t, uTmpReg);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG with a memory operand is implicitly atomic, so the locked
           worker is the default; the unlocked one is only used when the VM
           config says to disregard locking. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5202
5203
5204/**
5205 * @opcode 0x87
5206 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: swap via two temporaries; one body per
           effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Same pattern as iemOp_xchg_Eb_Gb: map the memory operand R/W,
           swap with the register via the (implicitly atomic) locked worker
           unless the VM config disregards locking, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 4);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint16_t, uTmpReg);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 4);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint32_t, uTmpReg);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                /* NOTE(review): pu32Reg references the local uTmpReg, and the
                   IEM_MC_STORE_GREG_U32 above already zero-extends the target
                   GREG - this clear looks redundant; confirm before touching. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 4);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_LOCAL(uint64_t, uTmpReg);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5348
5349
5350/**
5351 * @opcode 0x88
5352 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg -> reg copy of the low byte. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5389
5390
5391/**
5392 * @opcode 0x89
5393 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg -> reg copy, one body per effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5487
5488
5489/**
5490 * @opcode 0x8a
5491 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg -> reg copy of the low byte (note: direction is rm -> reg). */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5527
5528
5529/**
5530 * @opcode 0x8b
5531 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* reg -> reg copy (rm -> reg direction), per operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5625
5626
5627/**
5628 * opcode 0x63
5629 * @todo Table fixme
5630 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    /* 0x63 decodes as ARPL outside 64-bit mode and as MOVSXD in 64-bit
       mode; with a non-64-bit operand size the MOVSXD encoding is routed
       through the plain mov Gv,Ev handler. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
5639
5640
5641/**
5642 * @opcode 0x8c
5643 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    /* Uses the 8-bit (non-REX-extended) reg field: only ES..GS are valid. */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5719
5720
5721
5722
5723/**
5724 * @opcode 0x8d
5725 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* LEA requires a memory operand; the register form is undefined. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    /* Only the effective address is computed - no memory access; the
       address is truncated/stored according to the operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5772
5773
5774/**
5775 * @opcode 0x8e
5776 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Loading ES/SS/DS while executing 32-bit code gets the
           IEM_CIMPL_F_MODE flag; FS/GS loads (and loads outside 32-bit
           code) don't - presumably because only the former can affect how
           subsequent code is executed; confirm against the CIMPL flags. */
        if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
            IEM_MC_CALL_CIMPL_2(                    0, iemCImpl_load_SReg, iSRegArg, u16Value);
        else
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
            IEM_MC_CALL_CIMPL_2(                    0, iemCImpl_load_SReg, iSRegArg, u16Value);
        else
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
}
5836
5837
5838/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    /* The '<size> << 8' third argument to IEM_MC_CALC_RM_EFF_ADDR biases the
       rSP used in the EA calculation by the pop size, implementing the
       "RSP incremented first" rule described above.  IEM_MC_CALL_CIMPL_2
       ends each case by returning, so no break statements are needed here
       (matching the pattern used by other CIMPL-deferring decoders). */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_32BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_64BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5975
5976
5977/**
5978 * @opcode 0x8f
5979 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* Like VEX, XOP is invalid when combined with opsize/rep/lock/REX
           prefixes; bail out to #UD below in that case. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B bits are stored inverted in the prefix bytes, hence
               the ~ before extracting them. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            /* Low five bits of the second byte select the opcode map; only
               maps 8, 9 and 10 are defined. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6039
6040
6041/**
6042 * Common 'xchg reg,rAX' helper.
6043 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    /* Apply REX.B so the 0x90..0x97 encodings can reach r8..r15. */
    iReg |= pVCpu->iem.s.uRexB;
    /* Swap rAX and iReg via two temporaries, one body per operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6091
6092
6093/**
6094 * @opcode 0x90
6095 */
6096FNIEMOP_DEF(iemOp_nop)
6097{
6098 /* R8/R8D and RAX/EAX can be exchanged. */
6099 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6100 {
6101 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6102 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6103 }
6104
6105 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6106 {
6107 IEMOP_MNEMONIC(pause, "pause");
6108 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6109 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6110 if (!IEM_IS_IN_GUEST(pVCpu))
6111 { /* probable */ }
6112#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6113 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6114 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6115#endif
6116#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6117 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6118 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6119#endif
6120 }
6121 else
6122 IEMOP_MNEMONIC(nop, "nop");
6123 /** @todo testcase: lock nop; lock pause */
6124 IEM_MC_BEGIN(0, 0);
6125 IEMOP_HLP_DONE_DECODING();
6126 IEM_MC_ADVANCE_RIP_AND_FINISH();
6127 IEM_MC_END();
6128}
6129
6130
6131/**
6132 * @opcode 0x91
6133 */
6134FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6135{
6136 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6137 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6138}
6139
6140
6141/**
6142 * @opcode 0x92
6143 */
6144FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6145{
6146 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6147 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6148}
6149
6150
6151/**
6152 * @opcode 0x93
6153 */
6154FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6155{
6156 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6157 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6158}
6159
6160
6161/**
6162 * @opcode 0x94
6163 */
6164FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6165{
6166 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6167 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6168}
6169
6170
6171/**
6172 * @opcode 0x95
6173 */
6174FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6175{
6176 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6177 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6178}
6179
6180
6181/**
6182 * @opcode 0x96
6183 */
6184FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6185{
6186 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6187 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6188}
6189
6190
6191/**
6192 * @opcode 0x97
6193 */
6194FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6195{
6196 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6197 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6198}
6199
6200
6201/**
6202 * @opcode 0x98
6203 */
6204FNIEMOP_DEF(iemOp_cbw)
6205{
6206 switch (pVCpu->iem.s.enmEffOpSize)
6207 {
6208 case IEMMODE_16BIT:
6209 IEMOP_MNEMONIC(cbw, "cbw");
6210 IEM_MC_BEGIN(0, 1);
6211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6212 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6213 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6214 } IEM_MC_ELSE() {
6215 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6216 } IEM_MC_ENDIF();
6217 IEM_MC_ADVANCE_RIP_AND_FINISH();
6218 IEM_MC_END();
6219 break;
6220
6221 case IEMMODE_32BIT:
6222 IEMOP_MNEMONIC(cwde, "cwde");
6223 IEM_MC_BEGIN(0, 1);
6224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6225 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6226 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6227 } IEM_MC_ELSE() {
6228 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6229 } IEM_MC_ENDIF();
6230 IEM_MC_ADVANCE_RIP_AND_FINISH();
6231 IEM_MC_END();
6232 break;
6233
6234 case IEMMODE_64BIT:
6235 IEMOP_MNEMONIC(cdqe, "cdqe");
6236 IEM_MC_BEGIN(0, 1);
6237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6238 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6239 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6240 } IEM_MC_ELSE() {
6241 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6242 } IEM_MC_ENDIF();
6243 IEM_MC_ADVANCE_RIP_AND_FINISH();
6244 IEM_MC_END();
6245 break;
6246
6247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6248 }
6249}
6250
6251
6252/**
6253 * @opcode 0x99
6254 */
6255FNIEMOP_DEF(iemOp_cwd)
6256{
6257 switch (pVCpu->iem.s.enmEffOpSize)
6258 {
6259 case IEMMODE_16BIT:
6260 IEMOP_MNEMONIC(cwd, "cwd");
6261 IEM_MC_BEGIN(0, 1);
6262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6263 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6264 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6265 } IEM_MC_ELSE() {
6266 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6267 } IEM_MC_ENDIF();
6268 IEM_MC_ADVANCE_RIP_AND_FINISH();
6269 IEM_MC_END();
6270 break;
6271
6272 case IEMMODE_32BIT:
6273 IEMOP_MNEMONIC(cdq, "cdq");
6274 IEM_MC_BEGIN(0, 1);
6275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6276 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6277 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6278 } IEM_MC_ELSE() {
6279 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6280 } IEM_MC_ENDIF();
6281 IEM_MC_ADVANCE_RIP_AND_FINISH();
6282 IEM_MC_END();
6283 break;
6284
6285 case IEMMODE_64BIT:
6286 IEMOP_MNEMONIC(cqo, "cqo");
6287 IEM_MC_BEGIN(0, 1);
6288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6289 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6290 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6291 } IEM_MC_ELSE() {
6292 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6293 } IEM_MC_ENDIF();
6294 IEM_MC_ADVANCE_RIP_AND_FINISH();
6295 IEM_MC_END();
6296 break;
6297
6298 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6299 }
6300}
6301
6302
6303/**
6304 * @opcode 0x9a
6305 */
6306FNIEMOP_DEF(iemOp_call_Ap)
6307{
6308 IEMOP_MNEMONIC(call_Ap, "call Ap");
6309 IEMOP_HLP_NO_64BIT();
6310
6311 /* Decode the far pointer address and pass it on to the far call C implementation. */
6312 uint32_t off32Seg;
6313 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6314 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6315 else
6316 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6317 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6319 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
6320 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
6321 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6322}
6323
6324
/** Opcode 0x9b. (aka fwait)
 * Checks for pending x87 exceptions / device-not-available conditions and
 * otherwise behaves as a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6336
6337
6338/**
6339 * @opcode 0x9c
6340 */
6341FNIEMOP_DEF(iemOp_pushf_Fv)
6342{
6343 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6345 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6346 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6347}
6348
6349
6350/**
6351 * @opcode 0x9d
6352 */
6353FNIEMOP_DEF(iemOp_popf_Fv)
6354{
6355 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6357 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6358 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6359 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6360}
6361
6362
6363/**
6364 * @opcode 0x9e
6365 */
6366FNIEMOP_DEF(iemOp_sahf)
6367{
6368 IEMOP_MNEMONIC(sahf, "sahf");
6369 if ( IEM_IS_64BIT_CODE(pVCpu)
6370 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6371 IEMOP_RAISE_INVALID_OPCODE_RET();
6372 IEM_MC_BEGIN(0, 2);
6373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6374 IEM_MC_LOCAL(uint32_t, u32Flags);
6375 IEM_MC_LOCAL(uint32_t, EFlags);
6376 IEM_MC_FETCH_EFLAGS(EFlags);
6377 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6378 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6379 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6380 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6381 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6382 IEM_MC_COMMIT_EFLAGS(EFlags);
6383 IEM_MC_ADVANCE_RIP_AND_FINISH();
6384 IEM_MC_END();
6385}
6386
6387
6388/**
6389 * @opcode 0x9f
6390 */
6391FNIEMOP_DEF(iemOp_lahf)
6392{
6393 IEMOP_MNEMONIC(lahf, "lahf");
6394 if ( IEM_IS_64BIT_CODE(pVCpu)
6395 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6396 IEMOP_RAISE_INVALID_OPCODE_RET();
6397 IEM_MC_BEGIN(0, 1);
6398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6399 IEM_MC_LOCAL(uint8_t, u8Flags);
6400 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6401 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6402 IEM_MC_ADVANCE_RIP_AND_FINISH();
6403 IEM_MC_END();
6404}
6405
6406
6407/**
6408 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6409 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6410 * Will return/throw on failures.
6411 * @param a_GCPtrMemOff The variable to store the offset in.
6412 */
6413#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6414 do \
6415 { \
6416 switch (pVCpu->iem.s.enmEffAddrMode) \
6417 { \
6418 case IEMMODE_16BIT: \
6419 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6420 break; \
6421 case IEMMODE_32BIT: \
6422 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6423 break; \
6424 case IEMMODE_64BIT: \
6425 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6426 break; \
6427 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6428 } \
6429 } while (0)
6430
6431/**
6432 * @opcode 0xa0
6433 */
6434FNIEMOP_DEF(iemOp_mov_AL_Ob)
6435{
6436 /*
6437 * Get the offset.
6438 */
6439 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6440 RTGCPTR GCPtrMemOff;
6441 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6442
6443 /*
6444 * Fetch AL.
6445 */
6446 IEM_MC_BEGIN(0,1);
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 IEM_MC_LOCAL(uint8_t, u8Tmp);
6449 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6450 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6451 IEM_MC_ADVANCE_RIP_AND_FINISH();
6452 IEM_MC_END();
6453}
6454
6455
6456/**
6457 * @opcode 0xa1
6458 */
6459FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6460{
6461 /*
6462 * Get the offset.
6463 */
6464 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6465 RTGCPTR GCPtrMemOff;
6466 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6467
6468 /*
6469 * Fetch rAX.
6470 */
6471 switch (pVCpu->iem.s.enmEffOpSize)
6472 {
6473 case IEMMODE_16BIT:
6474 IEM_MC_BEGIN(0,1);
6475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6476 IEM_MC_LOCAL(uint16_t, u16Tmp);
6477 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6478 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6479 IEM_MC_ADVANCE_RIP_AND_FINISH();
6480 IEM_MC_END();
6481 break;
6482
6483 case IEMMODE_32BIT:
6484 IEM_MC_BEGIN(0,1);
6485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6486 IEM_MC_LOCAL(uint32_t, u32Tmp);
6487 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6488 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6489 IEM_MC_ADVANCE_RIP_AND_FINISH();
6490 IEM_MC_END();
6491 break;
6492
6493 case IEMMODE_64BIT:
6494 IEM_MC_BEGIN(0,1);
6495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6496 IEM_MC_LOCAL(uint64_t, u64Tmp);
6497 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6498 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6499 IEM_MC_ADVANCE_RIP_AND_FINISH();
6500 IEM_MC_END();
6501 break;
6502
6503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6504 }
6505}
6506
6507
6508/**
6509 * @opcode 0xa2
6510 */
6511FNIEMOP_DEF(iemOp_mov_Ob_AL)
6512{
6513 /*
6514 * Get the offset.
6515 */
6516 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6517 RTGCPTR GCPtrMemOff;
6518 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6519
6520 /*
6521 * Store AL.
6522 */
6523 IEM_MC_BEGIN(0,1);
6524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6525 IEM_MC_LOCAL(uint8_t, u8Tmp);
6526 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6527 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6528 IEM_MC_ADVANCE_RIP_AND_FINISH();
6529 IEM_MC_END();
6530}
6531
6532
6533/**
6534 * @opcode 0xa3
6535 */
6536FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6537{
6538 /*
6539 * Get the offset.
6540 */
6541 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6542 RTGCPTR GCPtrMemOff;
6543 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6544
6545 /*
6546 * Store rAX.
6547 */
6548 switch (pVCpu->iem.s.enmEffOpSize)
6549 {
6550 case IEMMODE_16BIT:
6551 IEM_MC_BEGIN(0,1);
6552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6553 IEM_MC_LOCAL(uint16_t, u16Tmp);
6554 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6555 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6556 IEM_MC_ADVANCE_RIP_AND_FINISH();
6557 IEM_MC_END();
6558 break;
6559
6560 case IEMMODE_32BIT:
6561 IEM_MC_BEGIN(0,1);
6562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6563 IEM_MC_LOCAL(uint32_t, u32Tmp);
6564 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6565 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6566 IEM_MC_ADVANCE_RIP_AND_FINISH();
6567 IEM_MC_END();
6568 break;
6569
6570 case IEMMODE_64BIT:
6571 IEM_MC_BEGIN(0,1);
6572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6573 IEM_MC_LOCAL(uint64_t, u64Tmp);
6574 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6575 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6576 IEM_MC_ADVANCE_RIP_AND_FINISH();
6577 IEM_MC_END();
6578 break;
6579
6580 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6581 }
6582}
6583
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Loads ValBits from the effective segment at xSI, stores them to ES:xDI,
 * then advances (DF=0) or rewinds (DF=1) both index registers by ValBits/8. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6603
6604/**
6605 * @opcode 0xa4
6606 */
6607FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6608{
6609 /*
6610 * Use the C implementation if a repeat prefix is encountered.
6611 */
6612 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6613 {
6614 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6616 switch (pVCpu->iem.s.enmEffAddrMode)
6617 {
6618 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6619 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6620 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6622 }
6623 }
6624
6625 /*
6626 * Sharing case implementation with movs[wdq] below.
6627 */
6628 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6629 switch (pVCpu->iem.s.enmEffAddrMode)
6630 {
6631 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
6632 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
6633 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
6634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6635 }
6636}
6637
6638
6639/**
6640 * @opcode 0xa5
6641 */
6642FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6643{
6644
6645 /*
6646 * Use the C implementation if a repeat prefix is encountered.
6647 */
6648 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6649 {
6650 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6652 switch (pVCpu->iem.s.enmEffOpSize)
6653 {
6654 case IEMMODE_16BIT:
6655 switch (pVCpu->iem.s.enmEffAddrMode)
6656 {
6657 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6658 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6659 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6661 }
6662 break;
6663 case IEMMODE_32BIT:
6664 switch (pVCpu->iem.s.enmEffAddrMode)
6665 {
6666 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6667 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6668 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6669 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6670 }
6671 case IEMMODE_64BIT:
6672 switch (pVCpu->iem.s.enmEffAddrMode)
6673 {
6674 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6675 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6676 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6678 }
6679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6680 }
6681 }
6682
6683 /*
6684 * Annoying double switch here.
6685 * Using ugly macro for implementing the cases, sharing it with movsb.
6686 */
6687 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6688 switch (pVCpu->iem.s.enmEffOpSize)
6689 {
6690 case IEMMODE_16BIT:
6691 switch (pVCpu->iem.s.enmEffAddrMode)
6692 {
6693 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
6694 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
6695 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
6696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6697 }
6698 break;
6699
6700 case IEMMODE_32BIT:
6701 switch (pVCpu->iem.s.enmEffAddrMode)
6702 {
6703 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
6704 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
6705 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
6706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6707 }
6708 break;
6709
6710 case IEMMODE_64BIT:
6711 switch (pVCpu->iem.s.enmEffAddrMode)
6712 {
6713 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6714 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
6715 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
6716 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6717 }
6718 break;
6719 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6720 }
6721}
6722
6723#undef IEM_MOVS_CASE
6724
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Compares ValBits at the effective segment:xSI against ES:xDI via the
 * cmp assembly worker (updating EFLAGS only), then advances (DF=0) or
 * rewinds (DF=1) both index registers by ValBits/8. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6752
6753/**
6754 * @opcode 0xa6
6755 */
6756FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6757{
6758
6759 /*
6760 * Use the C implementation if a repeat prefix is encountered.
6761 */
6762 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6763 {
6764 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6766 switch (pVCpu->iem.s.enmEffAddrMode)
6767 {
6768 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6769 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6770 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6772 }
6773 }
6774 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6775 {
6776 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6778 switch (pVCpu->iem.s.enmEffAddrMode)
6779 {
6780 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6781 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6782 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6784 }
6785 }
6786
6787 /*
6788 * Sharing case implementation with cmps[wdq] below.
6789 */
6790 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6791 switch (pVCpu->iem.s.enmEffAddrMode)
6792 {
6793 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
6794 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
6795 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
6796 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6797 }
6798}
6799
6800
6801/**
6802 * @opcode 0xa7
6803 */
6804FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6805{
6806 /*
6807 * Use the C implementation if a repeat prefix is encountered.
6808 */
6809 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6810 {
6811 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6813 switch (pVCpu->iem.s.enmEffOpSize)
6814 {
6815 case IEMMODE_16BIT:
6816 switch (pVCpu->iem.s.enmEffAddrMode)
6817 {
6818 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6819 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6820 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6822 }
6823 break;
6824 case IEMMODE_32BIT:
6825 switch (pVCpu->iem.s.enmEffAddrMode)
6826 {
6827 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6828 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6829 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6831 }
6832 case IEMMODE_64BIT:
6833 switch (pVCpu->iem.s.enmEffAddrMode)
6834 {
6835 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6836 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6837 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6838 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6839 }
6840 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6841 }
6842 }
6843
6844 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6845 {
6846 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6847 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6848 switch (pVCpu->iem.s.enmEffOpSize)
6849 {
6850 case IEMMODE_16BIT:
6851 switch (pVCpu->iem.s.enmEffAddrMode)
6852 {
6853 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6854 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6855 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6857 }
6858 break;
6859 case IEMMODE_32BIT:
6860 switch (pVCpu->iem.s.enmEffAddrMode)
6861 {
6862 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6863 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6864 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6865 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6866 }
6867 case IEMMODE_64BIT:
6868 switch (pVCpu->iem.s.enmEffAddrMode)
6869 {
6870 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6871 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6872 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6873 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6874 }
6875 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6876 }
6877 }
6878
6879 /*
6880 * Annoying double switch here.
6881 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6882 */
6883 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6884 switch (pVCpu->iem.s.enmEffOpSize)
6885 {
6886 case IEMMODE_16BIT:
6887 switch (pVCpu->iem.s.enmEffAddrMode)
6888 {
6889 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
6890 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
6891 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
6892 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6893 }
6894 break;
6895
6896 case IEMMODE_32BIT:
6897 switch (pVCpu->iem.s.enmEffAddrMode)
6898 {
6899 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
6900 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
6901 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
6902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6903 }
6904 break;
6905
6906 case IEMMODE_64BIT:
6907 switch (pVCpu->iem.s.enmEffAddrMode)
6908 {
6909 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6910 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
6911 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
6912 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6913 }
6914 break;
6915 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6916 }
6917}
6918
6919#undef IEM_CMPS_CASE
6920
6921/**
6922 * @opcode 0xa8
6923 */
6924FNIEMOP_DEF(iemOp_test_AL_Ib)
6925{
6926 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6927 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6928 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6929}
6930
6931
6932/**
6933 * @opcode 0xa9
6934 */
6935FNIEMOP_DEF(iemOp_test_eAX_Iz)
6936{
6937 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6938 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6939 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6940}
6941
6942
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Stores ValBits from rAX to ES:xDI, then advances (DF=0) or rewinds
 * (DF=1) xDI by ValBits/8.  Only xDI is updated; xSI is untouched. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6959
6960/**
6961 * @opcode 0xaa
6962 */
6963FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6964{
6965 /*
6966 * Use the C implementation if a repeat prefix is encountered.
6967 */
6968 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6969 {
6970 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6971 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6972 switch (pVCpu->iem.s.enmEffAddrMode)
6973 {
6974 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6975 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6976 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6977 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6978 }
6979 }
6980
6981 /*
6982 * Sharing case implementation with stos[wdq] below.
6983 */
6984 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6985 switch (pVCpu->iem.s.enmEffAddrMode)
6986 {
6987 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
6988 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
6989 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
6990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6991 }
6992}
6993
6994
6995/**
6996 * @opcode 0xab
6997 */
6998FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6999{
7000 /*
7001 * Use the C implementation if a repeat prefix is encountered.
7002 */
7003 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7004 {
7005 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7007 switch (pVCpu->iem.s.enmEffOpSize)
7008 {
7009 case IEMMODE_16BIT:
7010 switch (pVCpu->iem.s.enmEffAddrMode)
7011 {
7012 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
7013 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
7014 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
7015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7016 }
7017 break;
7018 case IEMMODE_32BIT:
7019 switch (pVCpu->iem.s.enmEffAddrMode)
7020 {
7021 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
7022 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
7023 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
7024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7025 }
7026 case IEMMODE_64BIT:
7027 switch (pVCpu->iem.s.enmEffAddrMode)
7028 {
7029 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7030 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
7031 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
7032 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7033 }
7034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7035 }
7036 }
7037
7038 /*
7039 * Annoying double switch here.
7040 * Using ugly macro for implementing the cases, sharing it with stosb.
7041 */
7042 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7043 switch (pVCpu->iem.s.enmEffOpSize)
7044 {
7045 case IEMMODE_16BIT:
7046 switch (pVCpu->iem.s.enmEffAddrMode)
7047 {
7048 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
7049 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
7050 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
7051 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7052 }
7053 break;
7054
7055 case IEMMODE_32BIT:
7056 switch (pVCpu->iem.s.enmEffAddrMode)
7057 {
7058 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
7059 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
7060 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
7061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7062 }
7063 break;
7064
7065 case IEMMODE_64BIT:
7066 switch (pVCpu->iem.s.enmEffAddrMode)
7067 {
7068 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7069 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
7070 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
7071 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7072 }
7073 break;
7074 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7075 }
7076}
7077
7078#undef IEM_STOS_CASE
7079
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one non-REP LODS iteration: fetch ValBits of data
 * from iEffSeg:[xSI] (xSI zero-extended per AddrBits), store it into xAX,
 * then step xSI down (DF=1) or up (DF=0) by ValBits/8 bytes. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

/**
 * @opcode 0xac
 *
 * 'lodsb AL,Xb' - load the byte at iEffSeg:[xSI] into AL and step xSI
 * (direction per EFLAGS.DF).  REP forms defer to the C implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7130
7131
7132/**
7133 * @opcode 0xad
7134 */
7135FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7136{
7137 /*
7138 * Use the C implementation if a repeat prefix is encountered.
7139 */
7140 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7141 {
7142 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7144 switch (pVCpu->iem.s.enmEffOpSize)
7145 {
7146 case IEMMODE_16BIT:
7147 switch (pVCpu->iem.s.enmEffAddrMode)
7148 {
7149 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7150 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7151 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7152 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7153 }
7154 break;
7155 case IEMMODE_32BIT:
7156 switch (pVCpu->iem.s.enmEffAddrMode)
7157 {
7158 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7159 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7160 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163 case IEMMODE_64BIT:
7164 switch (pVCpu->iem.s.enmEffAddrMode)
7165 {
7166 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7167 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7168 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7169 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7170 }
7171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7172 }
7173 }
7174
7175 /*
7176 * Annoying double switch here.
7177 * Using ugly macro for implementing the cases, sharing it with lodsb.
7178 */
7179 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7180 switch (pVCpu->iem.s.enmEffOpSize)
7181 {
7182 case IEMMODE_16BIT:
7183 switch (pVCpu->iem.s.enmEffAddrMode)
7184 {
7185 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
7186 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
7187 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
7188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7189 }
7190 break;
7191
7192 case IEMMODE_32BIT:
7193 switch (pVCpu->iem.s.enmEffAddrMode)
7194 {
7195 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
7196 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
7197 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
7198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7199 }
7200 break;
7201
7202 case IEMMODE_64BIT:
7203 switch (pVCpu->iem.s.enmEffAddrMode)
7204 {
7205 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7206 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
7207 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
7208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7209 }
7210 break;
7211 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7212 }
7213}
7214
7215#undef IEM_LODS_CASE
7216
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one non-REP SCAS iteration: fetch ValBits of data
 * from ES:[xDI] (no segment override for SCAS), compare it against xAX via
 * iemAImpl_cmp_u<ValBits> (updating EFLAGS only), then step xDI down (DF=1)
 * or up (DF=0) by ValBits/8 bytes. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7239
7240/**
7241 * @opcode 0xae
7242 */
7243FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7244{
7245 /*
7246 * Use the C implementation if a repeat prefix is encountered.
7247 */
7248 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7249 {
7250 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7252 switch (pVCpu->iem.s.enmEffAddrMode)
7253 {
7254 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
7255 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
7256 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
7257 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7258 }
7259 }
7260 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7261 {
7262 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7264 switch (pVCpu->iem.s.enmEffAddrMode)
7265 {
7266 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
7267 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
7268 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
7269 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7270 }
7271 }
7272
7273 /*
7274 * Sharing case implementation with stos[wdq] below.
7275 */
7276 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7277 switch (pVCpu->iem.s.enmEffAddrMode)
7278 {
7279 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
7280 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
7281 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
7282 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7283 }
7284}
7285
7286
7287/**
7288 * @opcode 0xaf
7289 */
7290FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7291{
7292 /*
7293 * Use the C implementation if a repeat prefix is encountered.
7294 */
7295 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7296 {
7297 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7299 switch (pVCpu->iem.s.enmEffOpSize)
7300 {
7301 case IEMMODE_16BIT:
7302 switch (pVCpu->iem.s.enmEffAddrMode)
7303 {
7304 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
7305 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
7306 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
7307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7308 }
7309 break;
7310 case IEMMODE_32BIT:
7311 switch (pVCpu->iem.s.enmEffAddrMode)
7312 {
7313 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
7314 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
7315 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
7316 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7317 }
7318 case IEMMODE_64BIT:
7319 switch (pVCpu->iem.s.enmEffAddrMode)
7320 {
7321 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7322 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
7323 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
7324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7325 }
7326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7327 }
7328 }
7329 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7330 {
7331 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7333 switch (pVCpu->iem.s.enmEffOpSize)
7334 {
7335 case IEMMODE_16BIT:
7336 switch (pVCpu->iem.s.enmEffAddrMode)
7337 {
7338 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
7339 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
7340 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
7341 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7342 }
7343 break;
7344 case IEMMODE_32BIT:
7345 switch (pVCpu->iem.s.enmEffAddrMode)
7346 {
7347 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
7348 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
7349 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
7350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7351 }
7352 case IEMMODE_64BIT:
7353 switch (pVCpu->iem.s.enmEffAddrMode)
7354 {
7355 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7356 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
7357 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
7358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7359 }
7360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7361 }
7362 }
7363
7364 /*
7365 * Annoying double switch here.
7366 * Using ugly macro for implementing the cases, sharing it with scasb.
7367 */
7368 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7369 switch (pVCpu->iem.s.enmEffOpSize)
7370 {
7371 case IEMMODE_16BIT:
7372 switch (pVCpu->iem.s.enmEffAddrMode)
7373 {
7374 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
7375 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
7376 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
7377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7378 }
7379 break;
7380
7381 case IEMMODE_32BIT:
7382 switch (pVCpu->iem.s.enmEffAddrMode)
7383 {
7384 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
7385 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
7386 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
7387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7388 }
7389 break;
7390
7391 case IEMMODE_64BIT:
7392 switch (pVCpu->iem.s.enmEffAddrMode)
7393 {
7394 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7395 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
7396 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
7397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7398 }
7399 break;
7400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7401 }
7402}
7403
7404#undef IEM_SCAS_CASE
7405
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given 8-bit register.
 *
 * @param   iFixedReg   The register index (already combined with uRexB by
 *                      the callers, so under REX this may select SPL..R15B).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7419
7420
/**
 * @opcode 0xb0
 * 'mov AL,Ib' - register index 0, extended to R8B by REX.B.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7429
7430
/**
 * @opcode 0xb1
 * 'mov CL,Ib' - register index 1, extended to R9B by REX.B.
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7439
7440
/**
 * @opcode 0xb2
 * 'mov DL,Ib' - register index 2, extended to R10B by REX.B.
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7449
7450
/**
 * @opcode 0xb3
 * 'mov BL,Ib' - register index 3, extended to R11B by REX.B.
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7459
7460
/**
 * @opcode 0xb4
 * 'mov AH,Ib' - register index 4 (X86_GREG_xSP): AH without REX;
 * NOTE(review): with REX this presumably selects SPL/R12B - the high-byte
 * vs. low-byte mapping is handled inside IEM_MC_STORE_GREG_U8, confirm.
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7469
7470
/**
 * @opcode 0xb5
 * 'mov CH,Ib' - register index 5 (X86_GREG_xBP): CH without REX;
 * NOTE(review): with REX this presumably selects BPL/R13B, confirm.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7479
7480
/**
 * @opcode 0xb6
 * 'mov DH,Ib' - register index 6 (X86_GREG_xSI): DH without REX;
 * NOTE(review): with REX this presumably selects SIL/R14B, confirm.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7489
7490
/**
 * @opcode 0xb7
 * 'mov BH,Ib' - register index 7 (X86_GREG_xDI): BH without REX;
 * NOTE(review): with REX this presumably selects DIL/R15B, confirm.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7499
7500
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate sized per the effective operand size (a full 64-bit
 * immediate for 64-bit operand size, i.e. the 'movabs' form) and stores it
 * into the given general register.
 *
 * @param   iFixedReg   The register index (already combined with uRexB by
 *                      the callers).
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7545
7546
/**
 * @opcode 0xb8
 * 'mov rAX,Iv' - register index 0, extended to R8 by REX.B.
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7555
7556
/**
 * @opcode 0xb9
 * 'mov rCX,Iv' - register index 1, extended to R9 by REX.B.
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7565
7566
/**
 * @opcode 0xba
 * 'mov rDX,Iv' - register index 2, extended to R10 by REX.B.
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7575
7576
/**
 * @opcode 0xbb
 * 'mov rBX,Iv' - register index 3, extended to R11 by REX.B.
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7585
7586
/**
 * @opcode 0xbc
 * 'mov rSP,Iv' - register index 4, extended to R12 by REX.B.
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7595
7596
/**
 * @opcode 0xbd
 * 'mov rBP,Iv' - register index 5, extended to R13 by REX.B.
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7605
7606
/**
 * @opcode 0xbe
 * 'mov rSI,Iv' - register index 6, extended to R14 by REX.B.
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7615
7616
/**
 * @opcode 0xbf
 * 'mov rDI,Iv' - register index 7, extended to R15 by REX.B.
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7625
7626
/**
 * @opcode 0xc0
 *
 * Group 2 byte shift/rotate with imm8 count: rol/ror/rcl/rcr/shl/shr/sar
 * Eb,Ib selected by ModRM.reg; /6 is an invalid encoding.  186+ only.
 * OF and AF are declared undefined for verification purposes.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the worker table (EFLAGS behaviour is CPU-vendor dependent). */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        /* The imm8 count follows the ModRM bytes, hence the '1' cbImm hint. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7688
7689
/**
 * @opcode 0xc1
 *
 * Group 2 word/dword/qword shift/rotate with imm8 count: rol/ror/rcl/rcr/
 * shl/shr/sar Ev,Ib selected by ModRM.reg; /6 is an invalid encoding.
 * 186+ only.  OF and AF are declared undefined for verification purposes.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the worker table (EFLAGS behaviour is CPU-vendor dependent). */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes clear the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                /* The imm8 count follows the ModRM bytes, hence the '1' cbImm hint. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7836
7837
/**
 * @opcode 0xc2
 *
 * 'retn Iw' - near return, popping Iw extra bytes off the stack.  Defers to
 * the C implementation matching the effective operand size (64-bit default
 * in long mode; Intel ignores the operand-size prefix here).
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7858
7859
/**
 * @opcode 0xc3
 *
 * 'retn' - plain near return.  Defers to the C implementation matching the
 * effective operand size (64-bit default in long mode; Intel ignores the
 * operand-size prefix here).
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7879
7880
/**
 * @opcode 0xc4
 *
 * LES Gv,Mp in 16/32-bit code with MOD!=3; otherwise the 3-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.   (Comment previously
       said 'LDS'; 0xc4 is LES - see the 0xc5/LDS sibling below.)
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The VEX payload bits are stored inverted; extract into REX-style fields. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* Low 5 bits of the (reused) ModRM byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7950
7951
/**
 * @opcode 0xc5
 *
 * LDS Gv,Mp in 16/32-bit code with MOD!=3; otherwise the 2-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.   (Comment previously
       said 'LES'; 0xc5 is LDS - see the 0xc4/LES sibling above.)
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* The VEX payload bits are stored inverted; 2-byte VEX only maps opcode map 1. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
7995
7996
/**
 * @opcode 0xc6
 * Group 11: only /0 (MOV Eb,Ib) is defined; /1../7 raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The effective address must be calculated before fetching the
           immediate; the '1' tells the decoder one immediate byte follows. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8030
8031
/**
 * @opcode 0xc7
 * Group 11: only /0 (MOV Ev,Iz) is defined; /1../7 raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is a sign-extended 32-bit immediate with a 64-bit operand size. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The 2nd argument to IEM_MC_CALC_RM_EFF_ADDR is the
           number of immediate bytes still to be fetched after the ModR/M. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still 4 imm bytes in 64-bit mode (sign-extended). */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8119
8120
8121
8122
/**
 * @opcode 0xc8
 * ENTER Iw,Ib - allocate a stack frame; deferred to a C implementation.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();                    /* not available on 8086/80186 predecessors */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8136
8137
/**
 * @opcode 0xc9
 * LEAVE - release the current stack frame; deferred to a C implementation.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();                    /* not available on 8086/80186 predecessors */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8149
8150
/**
 * @opcode 0xca
 * RETF Iw - far return, popping Iw extra bytes off the stack.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Far returns are indirect far branches and may change the CPU mode. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8162
8163
/**
 * @opcode 0xcb
 * RETF - far return without an extra stack adjustment (cbPop = 0).
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8174
8175
/**
 * @opcode 0xcc
 * INT3 - breakpoint; raises \#BP via the common software interrupt helper.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
8187
8188
/**
 * @opcode 0xcd
 * INT Ib - software interrupt with an arbitrary vector number.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, u8Int, IEMINT_INTN);
}
8201
8202
/**
 * @opcode 0xce
 * INTO - raise \#OF if OF is set; invalid in 64-bit mode.  The branch is
 * conditional since the interrupt only happens when OF=1.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
}
8214
8215
/**
 * @opcode 0xcf
 * IRET - interrupt return; checks for pending IRQs before executing since it
 * may re-enable interrupts (IEM_CIMPL_F_CHECK_IRQ_BEFORE).
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
8227
8228
/**
 * @opcode 0xd0
 * Group 2 rotate/shift Eb by a constant count of 1.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects the operation; the EFLAGS behavior variant
       is picked per emulated target CPU. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination read-write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8286
8287
8288
/**
 * @opcode 0xd1
 * Group 2 rotate/shift Ev by a constant count of 1.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects the operation; the EFLAGS behavior variant
       is picked per emulated target CPU. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the destination read-write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,       bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8427
8428
/**
 * @opcode 0xd2
 * Group 2 rotate/shift Eb by the count in CL.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects the operation; the EFLAGS behavior variant
       is picked per emulated target CPU. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the destination read-write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 3);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8488
8489
/**
 * @opcode 0xd3
 * Group 2 rotate/shift Ev by the count in CL.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects the operation; the EFLAGS behavior variant
       is picked per emulated target CPU. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the destination read-write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);  /* shift count = CL */
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8634
/**
 * @opcode 0xd4
 * AAM Ib - ASCII adjust after multiply; invalid in 64-bit mode.  The
 * immediate is the divisor, so a zero immediate raises \#DE at decode time.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET();     /* aam 0 divides by zero. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
}
8648
8649
/**
 * @opcode 0xd5
 * AAD Ib - ASCII adjust before division; invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
}
8661
8662
/**
 * @opcode 0xd6
 * SALC - undocumented instruction: AL = CF ? 0xff : 0x00.  Invalid in
 * 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8681
8682
/**
 * @opcode 0xd7
 * XLAT - AL = [iEffSeg : (e/r)BX + zero-extended AL]; the address register
 * width follows the effective address size, and iEffSeg honours segment
 * override prefixes.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            /* Note: the MEM16 fetch variant is used to keep the 16-bit wrap-around. */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8733
8734
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM when the FPU is unavailable and pending FPU exceptions before
 * doing any work; signals stack underflow if either register is empty.
 *
 * @param   bRm         Mod R/M byte (R/M selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* One of the registers is empty -> stack underflow, result goes to ST0. */
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8764
8765
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no result is stored.
 *
 * @param   bRm         Mod R/M byte (R/M selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to mark on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8795
8796
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the register stack when done.
 *
 * @param   bRm         Mod R/M byte (R/M selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to mark; still pops on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8826
8827
/** Opcode 0xd8 11/0.  FADD ST0,STn - delegates to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8834
8835
/** Opcode 0xd8 11/1.  FMUL ST0,STn - delegates to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8842
8843
/** Opcode 0xd8 11/2.  FCOM ST0,STn - compare, flags only, no pop. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8850
8851
/** Opcode 0xd8 11/3.  FCOMP ST0,STn - same compare as FCOM but pops ST0. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8858
8859
/** Opcode 0xd8 11/4.  FSUB ST0,STn - delegates to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8866
8867
/** Opcode 0xd8 11/5.  FSUBR ST0,STn - reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8874
8875
/** Opcode 0xd8 11/6.  FDIV ST0,STn - delegates to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8882
8883
/** Opcode 0xd8 11/7.  FDIVR ST0,STn - reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8890
8891
8892/**
8893 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8894 * the result in ST0.
8895 *
8896 * @param bRm Mod R/M byte.
8897 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8898 */
8899FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8900{
8901    IEM_MC_BEGIN(3, 3);
8902    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8903    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8904    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8905    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8906    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8907    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8908
    /* Decode the effective address and finish decoding before touching state. */
8909    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8910    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8911
8912    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8913    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit real source operand from memory (may raise #PF/#GP). */
8914    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8915
8916    IEM_MC_PREPARE_FPU_USAGE();
    /* Only invoke the assembly worker when ST0 holds a value; otherwise
       signal a stack underflow targeting ST0. */
8917    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8918        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8919        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8920    } IEM_MC_ELSE() {
8921        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8922    } IEM_MC_ENDIF();
8923    IEM_MC_ADVANCE_RIP_AND_FINISH();
8924
8925    IEM_MC_END();
8926}
8927
8928
/* Memory-form (mod!=3) 0xd8 workers, /0 and /1: forward to the common
 * ST0-with-m32r worker above together with the matching assembly helper. */
8929/** Opcode 0xd8 !11/0. */
8930FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
8931{
8932    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
8933    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
8934}
8935
8936
8937/** Opcode 0xd8 !11/1. */
8938FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
8939{
8940    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
8941    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
8942}
8943
8944
8945/** Opcode 0xd8 !11/2.
 * FCOM ST0 with a 32-bit real memory operand: compares without storing a
 * value result, only the FSW condition flags are updated.  Open-coded rather
 * than using a worker because the FSW update records the memory operand. */
8946FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
8947{
8948    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
8949
8950    IEM_MC_BEGIN(3, 3);
8951    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8952    IEM_MC_LOCAL(uint16_t, u16Fsw);
8953    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8954    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8955    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8956    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8957
8958    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8959    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8960
8961    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8962    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8963    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8964
8965    IEM_MC_PREPARE_FPU_USAGE();
    /* Compare only when ST0 is valid; the *_MEM_OP updates record FPUDP. */
8966    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8967        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
8968        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
8969    } IEM_MC_ELSE() {
8970        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
8971    } IEM_MC_ENDIF();
8972    IEM_MC_ADVANCE_RIP_AND_FINISH();
8973
8974    IEM_MC_END();
8975}
8976
8977
8978/** Opcode 0xd8 !11/3.
 * Same as iemOp_fcom_m32r but pops the register stack afterwards
 * (uses the *_THEN_POP FSW/underflow updaters). */
8979FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
8980{
8981    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
8982
8983    IEM_MC_BEGIN(3, 3);
8984    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8985    IEM_MC_LOCAL(uint16_t, u16Fsw);
8986    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8987    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8988    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8989    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8990
8991    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8992    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8993
8994    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8995    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8996    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8997
8998    IEM_MC_PREPARE_FPU_USAGE();
8999    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9000        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
9001        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9002    } IEM_MC_ELSE() {
9003        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9004    } IEM_MC_ENDIF();
9005    IEM_MC_ADVANCE_RIP_AND_FINISH();
9006
9007    IEM_MC_END();
9008}
9009
9010
/* Memory-form (mod!=3) 0xd8 workers, /4 thru /7: thin wrappers over the
 * common ST0-with-m32r worker, parameterized by the assembly helper. */
9011/** Opcode 0xd8 !11/4. */
9012FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
9013{
9014    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
9015    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
9016}
9017
9018
9019/** Opcode 0xd8 !11/5. */
9020FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
9021{
9022    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
9023    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
9024}
9025
9026
9027/** Opcode 0xd8 !11/6. */
9028FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
9029{
9030    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
9031    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
9032}
9033
9034
9035/** Opcode 0xd8 !11/7. */
9036FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
9037{
9038    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
9039    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
9040}
9041
9042
9043/**
9044 * @opcode 0xd8
 *
 * Decoder entry for the first FPU escape byte: reads the ModR/M byte,
 * records the 11-bit FPU opcode (low 3 bits of the escape byte combined
 * with the ModR/M byte), then dispatches on /reg to the register-form or
 * memory-form worker.
9045 */
9046FNIEMOP_DEF(iemOp_EscF0)
9047{
9048    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP register value: ModR/M in the low byte, escape-byte low bits above. */
9049    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9050
9051    if (IEM_IS_MODRM_REG_MODE(bRm))
9052    {
9053        switch (IEM_GET_MODRM_REG_8(bRm))
9054        {
9055            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
9056            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
9057            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
9058            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9059            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
9060            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9061            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
9062            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9063            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9064        }
9065    }
9066    else
9067    {
9068        switch (IEM_GET_MODRM_REG_8(bRm))
9069        {
9070            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
9071            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
9072            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
9073            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9074            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
9075            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9076            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
9077            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9078            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9079        }
9080    }
9081}
9082
9083
9084/** Opcode 0xd9 /0 mem32real
 * Pushes a 32-bit real memory operand onto the FPU stack (converted to R80).
9085 * @sa iemOp_fld_m64r */
9086FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
9087{
9088    IEMOP_MNEMONIC(fld_m32r, "fld m32r");
9089
9090    IEM_MC_BEGIN(2, 3);
9091    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9092    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9093    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
9094    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9095    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
9096
9097    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9098    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9099
9100    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9101    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9102    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9103    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (which becomes the new top after the push) must be free,
       otherwise this is a stack overflow rather than a load. */
9104    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
9105        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
9106        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9107    } IEM_MC_ELSE() {
9108        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9109    } IEM_MC_ENDIF();
9110    IEM_MC_ADVANCE_RIP_AND_FINISH();
9111
9112    IEM_MC_END();
9113}
9114
9115
9116/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to a 32-bit real memory operand without popping.  Uses the
 * legacy IEM_MC_MEM_MAP write-mapping so that the store and the FSW update
 * commit together (see the commit this chunk belongs to: map/unmap APIs
 * were being migrated at this revision). */
9117FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
9118{
9119    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
9120    IEM_MC_BEGIN(3, 2);
9121    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9122    IEM_MC_LOCAL(uint16_t, u16Fsw);
9123    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9124    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
9125    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9126
9127    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9128    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9129    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9130    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9131
    /* Map the destination for writing up front so memory faults are raised
       before the FPU state is modified. */
9132    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9133    IEM_MC_PREPARE_FPU_USAGE();
9134    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9135        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
9136        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
9137        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9138    } IEM_MC_ELSE() {
        /* Stack underflow: with the invalid-operation exception masked the
           CPU writes a negative QNaN indefinite to memory; unmasked, no store. */
9139        IEM_MC_IF_FCW_IM() {
9140            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
9141            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
9142        } IEM_MC_ENDIF();
9143        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9144    } IEM_MC_ENDIF();
9145    IEM_MC_ADVANCE_RIP_AND_FINISH();
9146
9147    IEM_MC_END();
9148}
9149
9150
9151/** Opcode 0xd9 !11/3
 * FSTP m32real: identical to iemOp_fst_m32r except the *_THEN_POP updaters
 * pop the register stack after the store. */
9152FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
9153{
9154    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
9155    IEM_MC_BEGIN(3, 2);
9156    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9157    IEM_MC_LOCAL(uint16_t, u16Fsw);
9158    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9159    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
9160    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9161
9162    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9163    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9164    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9165    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9166
9167    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9168    IEM_MC_PREPARE_FPU_USAGE();
9169    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9170        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
9171        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
9172        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9173    } IEM_MC_ELSE() {
        /* Masked underflow writes QNaN indefinite; the pop still happens
           via the *_THEN_POP underflow updater below. */
9174        IEM_MC_IF_FCW_IM() {
9175            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
9176            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
9177        } IEM_MC_ENDIF();
9178        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
9179    } IEM_MC_ENDIF();
9180    IEM_MC_ADVANCE_RIP_AND_FINISH();
9181
9182    IEM_MC_END();
9183}
9184
9185
9186/** Opcode 0xd9 !11/4
 * FLDENV: loads the FPU environment (14 or 28 bytes depending on operand
 * size) from memory.  Too complex for MC blocks, so deferred to a C
 * implementation via CIMPL. */
9187FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
9188{
9189    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
9190    IEM_MC_BEGIN(3, 0);
9191    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9192    IEM_MC_ARG(uint8_t, iEffSeg, 1);
9193    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
9194    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9195    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9196    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9197    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9198    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9199    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
9200    IEM_MC_END();
9201}
9202
9203
9204/** Opcode 0xd9 !11/5
 * FLDCW: loads a new FPU control word from a 16-bit memory operand and
 * hands it to the C implementation (which applies it and re-evaluates
 * pending exceptions). */
9205FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9206{
9207    IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9208    IEM_MC_BEGIN(1, 1);
9209    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    /* Renamed from u16Fsw: the value fetched here is the control word (FCW),
       not the status word.  Matches the u16Fcw naming used by iemOp_fnstcw. */
9210    IEM_MC_ARG(uint16_t, u16Fcw, 0);
9211    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9212    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9213    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9214    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9215    IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9216    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fcw);
9217    IEM_MC_END();
9218}
9219
9220
9221/** Opcode 0xd9 !11/6
 * FNSTENV: stores the FPU environment (14/28 bytes) to memory without
 * checking for pending exceptions; deferred to a C implementation. */
9222FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
9223{
    /* NOTE(review): mnemonic stats id says "fstenv" while the function is
       the no-wait FNSTENV form -- presumably intentional sharing; verify. */
9224    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
9225    IEM_MC_BEGIN(3, 0);
9226    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9227    IEM_MC_ARG(uint8_t, iEffSeg, 1);
9228    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
9229    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9230    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9231    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9232    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9233    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9234    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
9235    IEM_MC_END();
9236}
9237
9238
9239/** Opcode 0xd9 !11/7
 * FNSTCW: stores the current FPU control word to a 16-bit memory operand.
 * Simple enough to inline entirely in MC (no CIMPL needed). */
9240FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
9241{
9242    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
9243    IEM_MC_BEGIN(2, 0);
9244    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9245    IEM_MC_LOCAL(uint16_t, u16Fcw);
9246    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9247    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9248    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9249    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9250    IEM_MC_FETCH_FCW(u16Fcw);
9251    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
9252    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
9253    IEM_MC_END();
9254}
9255
9256
9257/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: does no arithmetic, but still raises #NM/#MF as appropriate and
 * updates FOP/FPUIP like other x87 instructions. */
9258FNIEMOP_DEF(iemOp_fnop)
9259{
9260    IEMOP_MNEMONIC(fnop, "fnop");
9261    IEM_MC_BEGIN(0, 0);
9262    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9263    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9264    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9265    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9266    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
9267     * intel optimizations. Investigate. */
9268    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
9269    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
9270    IEM_MC_END();
9271}
9272
9273
9274/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of register ST(i) onto the FPU stack. */
9275FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
9276{
9277    IEMOP_MNEMONIC(fld_stN, "fld stN");
9278    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
9279     * indicates that it does. */
9280    IEM_MC_BEGIN(0, 2);
9281    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9282    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
9283    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9284    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9285    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9286
9287    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register must hold a value; copy it into a zero-FSW result and
       push, otherwise signal push-underflow (reading an empty register). */
9288    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
9289        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
9290        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
9291    } IEM_MC_ELSE() {
9292        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
9293    } IEM_MC_ENDIF();
9294
9295    IEM_MC_ADVANCE_RIP_AND_FINISH();
9296    IEM_MC_END();
9297}
9298
9299
9300/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchanges ST0 and ST(i).  The swap is done inline when both
 * registers hold values; the underflow case goes to a C implementation. */
9301FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
9302{
9303    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
9304    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
9305     * indicates that it does. */
9306    IEM_MC_BEGIN(2, 3);
9307    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9308    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
9309    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
9310    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9311    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
9312    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
9313    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9314    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9315
9316    IEM_MC_PREPARE_FPU_USAGE();
    /* Happy path: write ST(i)'s old value into ST0 (via the result, with C1
       set) and ST0's old value into ST(i). */
9317    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9318        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
9319        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
9320        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9321    } IEM_MC_ELSE() {
        /* One/both registers empty: underflow semantics handled in C. */
9322        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
9323    } IEM_MC_ENDIF();
9324
9325    IEM_MC_ADVANCE_RIP_AND_FINISH();
9326    IEM_MC_END();
9327}
9328
9329
9330/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copies ST0 into ST(i) and pops.  The iDstReg==0 special case
 * degenerates to a pure pop (commonly used as 'ffreep st0'). */
9331FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
9332{
9333    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
9334
9335    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
9336    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
9337    if (!iDstReg)
9338    {
9339        IEM_MC_BEGIN(0, 1);
9340        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9341        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
9342        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9343        IEM_MC_MAYBE_RAISE_FPU_XCPT();
9344
9345        IEM_MC_PREPARE_FPU_USAGE();
        /* Nothing to store; just pop if ST0 is valid, else underflow+pop. */
9346        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
9347            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9348        } IEM_MC_ELSE() {
9349            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
9350        } IEM_MC_ENDIF();
9351
9352        IEM_MC_ADVANCE_RIP_AND_FINISH();
9353        IEM_MC_END();
9354    }
9355    else
9356    {
9357        IEM_MC_BEGIN(0, 2);
9358        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9359        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
9360        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9361        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9362        IEM_MC_MAYBE_RAISE_FPU_XCPT();
9363
9364        IEM_MC_PREPARE_FPU_USAGE();
        /* Copy ST0 into ST(iDstReg) via a zero-FSW result, then pop. */
9365        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9366            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
9367            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
9368        } IEM_MC_ELSE() {
9369            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
9370        } IEM_MC_ENDIF();
9371
9372        IEM_MC_ADVANCE_RIP_AND_FINISH();
9373        IEM_MC_END();
9374    }
9375}
9376
9377
9378/**
9379 * Common worker for FPU instructions working on ST0 and replaces it with the
9380 * result, i.e. unary operators.
9381 *
9382 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9383 */
9384FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
9385{
9386    IEM_MC_BEGIN(2, 1);
9387    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9388    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9389    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9390    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9391
9392    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9393    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9394    IEM_MC_PREPARE_FPU_USAGE();
    /* Apply the unary op to ST0 and store the result back into ST0;
       an empty ST0 is a stack underflow. */
9395    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9396        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
9397        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9398    } IEM_MC_ELSE() {
9399        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9400    } IEM_MC_ENDIF();
9401    IEM_MC_ADVANCE_RIP_AND_FINISH();
9402
9403    IEM_MC_END();
9404}
9405
9406
/* Unary ST0 wrappers: forward to iemOpHlpFpu_st0 with the assembly helper. */
9407/** Opcode 0xd9 0xe0. */
9408FNIEMOP_DEF(iemOp_fchs)
9409{
9410    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
9411    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
9412}
9413
9414
9415/** Opcode 0xd9 0xe1. */
9416FNIEMOP_DEF(iemOp_fabs)
9417{
9418    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
9419    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
9420}
9421
9422
9423/** Opcode 0xd9 0xe4.
 * FTST: compares ST0 against +0.0, updating only the FSW condition codes. */
9424FNIEMOP_DEF(iemOp_ftst)
9425{
9426    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
9427    IEM_MC_BEGIN(2, 1);
9428    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9429    IEM_MC_LOCAL(uint16_t, u16Fsw);
9430    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9431    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9432
9433    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9434    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9435    IEM_MC_PREPARE_FPU_USAGE();
9436    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9437        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
9438        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9439    } IEM_MC_ELSE() {
9440        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9441    } IEM_MC_ENDIF();
9442    IEM_MC_ADVANCE_RIP_AND_FINISH();
9443
9444    IEM_MC_END();
9445}
9446
9447
9449/** Opcode 0xd9 0xe5.
 * FXAM: classifies ST0 into the FSW condition codes.  Unlike most x87 ops
 * it is defined for an empty ST0 too, hence the unconditional register
 * reference (no empty-check branch). */
9450FNIEMOP_DEF(iemOp_fxam)
9451{
9452    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
9453    IEM_MC_BEGIN(2, 1);
9454    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9455    IEM_MC_LOCAL(uint16_t, u16Fsw);
9456    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9457    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9458
9459    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9460    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9461    IEM_MC_PREPARE_FPU_USAGE();
9462    IEM_MC_REF_FPUREG(pr80Value, 0);
9463    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
9464    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9465    IEM_MC_ADVANCE_RIP_AND_FINISH();
9466
9467    IEM_MC_END();
9468}
9468
9469
9470/**
9471 * Common worker for FPU instructions pushing a constant onto the FPU stack.
9472 *
9473 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9474 */
9475FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
9476{
9477    IEM_MC_BEGIN(1, 1);
9478    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9479    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9480    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9481
9482    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9483    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9484    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (new top after the push) must be free, else push-overflow. */
9485    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
9486        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
9487        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
9488    } IEM_MC_ELSE() {
9489        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
9490    } IEM_MC_ENDIF();
9491    IEM_MC_ADVANCE_RIP_AND_FINISH();
9492
9493    IEM_MC_END();
9494}
9495
9496
/* Constant-push wrappers (FLD1, FLDL2T, FLDL2E, FLDPI, FLDLG2, FLDLN2,
 * FLDZ): each forwards to iemOpHlpFpuPushConstant with the assembly helper
 * that produces the corresponding R80 constant. */
9497/** Opcode 0xd9 0xe8. */
9498FNIEMOP_DEF(iemOp_fld1)
9499{
9500    IEMOP_MNEMONIC(fld1, "fld1");
9501    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
9502}
9503
9504
9505/** Opcode 0xd9 0xe9. */
9506FNIEMOP_DEF(iemOp_fldl2t)
9507{
9508    IEMOP_MNEMONIC(fldl2t, "fldl2t");
9509    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
9510}
9511
9512
9513/** Opcode 0xd9 0xea. */
9514FNIEMOP_DEF(iemOp_fldl2e)
9515{
9516    IEMOP_MNEMONIC(fldl2e, "fldl2e");
9517    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
9518}
9519
9520/** Opcode 0xd9 0xeb. */
9521FNIEMOP_DEF(iemOp_fldpi)
9522{
9523    IEMOP_MNEMONIC(fldpi, "fldpi");
9524    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
9525}
9526
9527
9528/** Opcode 0xd9 0xec. */
9529FNIEMOP_DEF(iemOp_fldlg2)
9530{
9531    IEMOP_MNEMONIC(fldlg2, "fldlg2");
9532    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
9533}
9534
9535/** Opcode 0xd9 0xed. */
9536FNIEMOP_DEF(iemOp_fldln2)
9537{
9538    IEMOP_MNEMONIC(fldln2, "fldln2");
9539    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
9540}
9541
9542
9543/** Opcode 0xd9 0xee. */
9544FNIEMOP_DEF(iemOp_fldz)
9545{
9546    IEMOP_MNEMONIC(fldz, "fldz");
9547    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
9548}
9549
9550
9551/** Opcode 0xd9 0xf0.
9552 *
9553 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
9554 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
9555 * to produce proper results for +Inf and -Inf.
9556 *
9557 * This is probably useful in the implementation of pow() and similar.
9558 */
9559FNIEMOP_DEF(iemOp_f2xm1)
9560{
9561    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
9562    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
9563}
9564
9565
9566/**
9567 * Common worker for FPU instructions working on STn and ST0, storing the result
9568 * in STn, and popping the stack unless IE, DE or ZE was raised.
9569 *
9570 * @param bRm Mod R/M byte.
9571 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9572 */
9573FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9574{
9575    IEM_MC_BEGIN(3, 1);
9576    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9577    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9578    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9579    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9580    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9581
9582    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9583    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9584
9585    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand order: ST(i) is the first (destination) operand, ST0 second. */
9586    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
9587        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9588        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9589    } IEM_MC_ELSE() {
9590        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9591    } IEM_MC_ENDIF();
9592    IEM_MC_ADVANCE_RIP_AND_FINISH();
9593
9594    IEM_MC_END();
9595}
9596
9597
9598/** Opcode 0xd9 0xf1.
 * FYL2X: st1 = st1 * log2(st0), then pop.  The literal '1' selects ST1 as
 * the destination register for the common STn,ST0 popping worker. */
9599FNIEMOP_DEF(iemOp_fyl2x)
9600{
9601    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
9602    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
9603}
9604
9605
9606/**
9607 * Common worker for FPU instructions working on ST0 and having two outputs, one
9608 * replacing ST0 and one pushed onto the stack.
9609 *
9610 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9611 */
9612FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
9613{
9614    IEM_MC_BEGIN(2, 1);
9615    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9616    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
9617    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
9618    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9619
9620    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9621    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9622    IEM_MC_PREPARE_FPU_USAGE();
    /* Two-result push: first result replaces ST0, second is pushed on top
       (used by fptan/fsincos/fxtract). */
9623    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9624        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
9625        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
9626    } IEM_MC_ELSE() {
9627        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
9628    } IEM_MC_ENDIF();
9629    IEM_MC_ADVANCE_RIP_AND_FINISH();
9630
9631    IEM_MC_END();
9632}
9633
9634
/* 0xd9 F2-F5 wrappers: two-output replace+push workers (fptan, fxtract),
 * ST1-destination popping worker (fpatan) and ST0,ST1 worker (fprem1). */
9635/** Opcode 0xd9 0xf2. */
9636FNIEMOP_DEF(iemOp_fptan)
9637{
9638    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
9639    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
9640}
9641
9642
9643/** Opcode 0xd9 0xf3. */
9644FNIEMOP_DEF(iemOp_fpatan)
9645{
9646    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
9647    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
9648}
9649
9650
9651/** Opcode 0xd9 0xf4. */
9652FNIEMOP_DEF(iemOp_fxtract)
9653{
9654    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
9655    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
9656}
9657
9658
9659/** Opcode 0xd9 0xf5. */
9660FNIEMOP_DEF(iemOp_fprem1)
9661{
9662    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
9663    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
9664}
9665
9666
9667/** Opcode 0xd9 0xf6.
 * FDECSTP: rotates the FPU stack by decrementing the TOP field only; no
 * registers or tags are changed. */
9668FNIEMOP_DEF(iemOp_fdecstp)
9669{
9670    IEMOP_MNEMONIC(fdecstp, "fdecstp");
9671    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
9672    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
9673     * FINCSTP and FDECSTP. */
9674    IEM_MC_BEGIN(0,0);
9675    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9676
9677    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9678    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9679
9680    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9681    IEM_MC_FPU_STACK_DEC_TOP();
9682    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
9683
9684    IEM_MC_ADVANCE_RIP_AND_FINISH();
9685    IEM_MC_END();
9686}
9687
9688
9689/** Opcode 0xd9 0xf7.
 * FINCSTP: rotates the FPU stack by incrementing the TOP field only;
 * mirror image of iemOp_fdecstp. */
9690FNIEMOP_DEF(iemOp_fincstp)
9691{
9692    IEMOP_MNEMONIC(fincstp, "fincstp");
9693    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
9694    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
9695     * FINCSTP and FDECSTP. */
9696    IEM_MC_BEGIN(0,0);
9697    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9698
9699    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9700    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9701
9702    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9703    IEM_MC_FPU_STACK_INC_TOP();
9704    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
9705
9706    IEM_MC_ADVANCE_RIP_AND_FINISH();
9707    IEM_MC_END();
9708}
9709
9710
/* 0xd9 F8-FF wrappers: forward to the shared ST0/ST1 workers (fprem,
 * fscale), the ST1-destination popping worker (fyl2xp1), the unary ST0
 * worker (fsqrt, frndint, fsin, fcos) or the replace+push worker (fsincos). */
9711/** Opcode 0xd9 0xf8. */
9712FNIEMOP_DEF(iemOp_fprem)
9713{
9714    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
9715    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
9716}
9717
9718
9719/** Opcode 0xd9 0xf9. */
9720FNIEMOP_DEF(iemOp_fyl2xp1)
9721{
9722    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
9723    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
9724}
9725
9726
9727/** Opcode 0xd9 0xfa. */
9728FNIEMOP_DEF(iemOp_fsqrt)
9729{
9730    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
9731    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
9732}
9733
9734
9735/** Opcode 0xd9 0xfb. */
9736FNIEMOP_DEF(iemOp_fsincos)
9737{
9738    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
9739    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
9740}
9741
9742
9743/** Opcode 0xd9 0xfc. */
9744FNIEMOP_DEF(iemOp_frndint)
9745{
9746    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
9747    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
9748}
9749
9750
9751/** Opcode 0xd9 0xfd. */
9752FNIEMOP_DEF(iemOp_fscale)
9753{
9754    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
9755    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
9756}
9757
9758
9759/** Opcode 0xd9 0xfe. */
9760FNIEMOP_DEF(iemOp_fsin)
9761{
9762    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
9763    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
9764}
9765
9766
9767/** Opcode 0xd9 0xff. */
9768FNIEMOP_DEF(iemOp_fcos)
9769{
9770    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
9771    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
9772}
9773
9774
9775/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with ModR/M bytes 0xe0-0xff; indexed by
 * (bRm - 0xe0).  Undefined encodings map to iemOp_Invalid. */
9776IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
9777{
9778    /* 0xe0 */ iemOp_fchs,
9779    /* 0xe1 */ iemOp_fabs,
9780    /* 0xe2 */ iemOp_Invalid,
9781    /* 0xe3 */ iemOp_Invalid,
9782    /* 0xe4 */ iemOp_ftst,
9783    /* 0xe5 */ iemOp_fxam,
9784    /* 0xe6 */ iemOp_Invalid,
9785    /* 0xe7 */ iemOp_Invalid,
9786    /* 0xe8 */ iemOp_fld1,
9787    /* 0xe9 */ iemOp_fldl2t,
9788    /* 0xea */ iemOp_fldl2e,
9789    /* 0xeb */ iemOp_fldpi,
9790    /* 0xec */ iemOp_fldlg2,
9791    /* 0xed */ iemOp_fldln2,
9792    /* 0xee */ iemOp_fldz,
9793    /* 0xef */ iemOp_Invalid,
9794    /* 0xf0 */ iemOp_f2xm1,
9795    /* 0xf1 */ iemOp_fyl2x,
9796    /* 0xf2 */ iemOp_fptan,
9797    /* 0xf3 */ iemOp_fpatan,
9798    /* 0xf4 */ iemOp_fxtract,
9799    /* 0xf5 */ iemOp_fprem1,
9800    /* 0xf6 */ iemOp_fdecstp,
9801    /* 0xf7 */ iemOp_fincstp,
9802    /* 0xf8 */ iemOp_fprem,
9803    /* 0xf9 */ iemOp_fyl2xp1,
9804    /* 0xfa */ iemOp_fsqrt,
9805    /* 0xfb */ iemOp_fsincos,
9806    /* 0xfc */ iemOp_frndint,
9807    /* 0xfd */ iemOp_fscale,
9808    /* 0xfe */ iemOp_fsin,
9809    /* 0xff */ iemOp_fcos
9810};
9811
9812
9813/**
9814 * @opcode 0xd9
 *
 * Decoder entry for the second FPU escape byte.  Register forms /2 and /3
 * are special-cased (only 0xd0 is FNOP; /3 is a reserved encoding that is
 * treated as FSTP on real hardware), while /4-/7 dispatch through the
 * g_apfnEscF1_E0toFF table above.
9815 */
9816FNIEMOP_DEF(iemOp_EscF1)
9817{
9818    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9819    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
9820
9821    if (IEM_IS_MODRM_REG_MODE(bRm))
9822    {
9823        switch (IEM_GET_MODRM_REG_8(bRm))
9824        {
9825            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
9826            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
9827            case 2:
9828                if (bRm == 0xd0)
9829                    return FNIEMOP_CALL(iemOp_fnop);
9830                IEMOP_RAISE_INVALID_OPCODE_RET();
9831            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
9832            case 4:
9833            case 5:
9834            case 6:
9835            case 7:
9836                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
9837                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
9838            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9839        }
9840    }
9841    else
9842    {
9843        switch (IEM_GET_MODRM_REG_8(bRm))
9844        {
9845            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
9846            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
9847            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
9848            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
9849            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,   bRm);
9850            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,    bRm);
9851            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,  bRm);
9852            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,   bRm);
9853            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9854        }
9855    }
9856}
9857
9858
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    /* FCMOVB ST0,ST(i): copy ST(i) into ST0 when CF is set. */
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9883
9884
/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    /* FCMOVE ST0,ST(i): copy ST(i) into ST0 when ZF is set. */
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9909
9910
/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    /* FCMOVBE ST0,ST(i): copy ST(i) into ST0 when CF or ZF is set. */
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9935
9936
/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    /* FCMOVU ST0,ST(i): copy ST(i) into ST0 when PF is set (unordered result). */
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9961
9962
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * Used by FUCOMPP (and similar compare-and-pop-twice forms): the assembly
 * worker only produces an FSW value; no register result is stored.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Compare ST0 against ST1; both must be non-empty, else underflow + double pop. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9992
9993
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    /* Unordered compare of ST0 and ST1, popping both when done. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10000
10001
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Used by the FIADD/FIMUL/FISUB/FISUBR/FIDIV/FIDIVR m32i forms.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit integer operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: stack underflow on the destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10037
10038
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    /* ST0 = ST0 + (int32_t)m32i. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10045
10046
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    /* ST0 = ST0 * (int32_t)m32i. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10053
10054
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
    /* Compare ST0 against a 32-bit integer memory operand; only FSW is updated. */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: underflow; UINT8_MAX = no destination register to update. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10086
10087
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
    /* Same as FICOM m32i, but pops ST0 afterwards. */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: underflow + pop; UINT8_MAX = no destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10119
10120
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    /* ST0 = ST0 - (int32_t)m32i. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10127
10128
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    /* ST0 = (int32_t)m32i - ST0 (reversed subtraction). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10135
10136
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    /* ST0 = ST0 / (int32_t)m32i. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10143
10144
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    /* ST0 = (int32_t)m32i / ST0 (reversed division). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10151
10152
/**
 * @opcode 0xda
 *
 * x87 escape byte 0xda: register forms are FCMOVB/E/BE/U and FUCOMPP (0xe9);
 * memory forms are the m32i integer arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (ModR/M + low escape-opcode bits). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in the /5 register range. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10194
10195
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");
    /* Load a 32-bit signed integer from memory and push it onto the FPU stack. */

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (relative register 7) must be free, else push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10226
10227
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    /* Store ST0 to m32i with truncation (SSE3 FISTTP) and pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front; both the normal and the
       masked-underflow paths store through it. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer indefinite and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10261
10262
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    /* Store ST0 to m32i using the current rounding mode; no pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front; both the normal and the
       masked-underflow paths store through it. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10296
10297
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    /* Store ST0 to m32i using the current rounding mode, then pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front; both the normal and the
       masked-underflow paths store through it. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer indefinite and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10331
10332
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");
    /* Load an 80-bit extended real from memory and push it onto the FPU stack. */

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (relative register 7) must be free, else push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10363
10364
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    /* Store ST0 as an 80-bit extended real to memory, then pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 10-byte destination writable up front (explicit size/alignment,
       hence the _EX variant); both store paths write through it. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the QNaN real indefinite and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10398
10399
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    /* FCMOVNB ST0,ST(i): copy ST(i) into ST0 when CF is clear. */
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10424
10425
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    /* FCMOVNE ST0,ST(i): copy ST(i) into ST0 when ZF is clear. */
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10450
10451
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    /* FCMOVNBE ST0,ST(i): copy ST(i) into ST0 when both CF and ZF are clear. */
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10476
10477
/** Opcode 0xdb 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    /* NOTE(review): the Intel mnemonic is FCMOVNU; the doubled 'n' in the
       function/mnemonic name looks like a typo, kept for compatibility with
       the dispatcher and stats names. */
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    /* FCMOVNU ST0,ST(i): copy ST(i) into ST0 when PF is clear (ordered). */
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10502
10503
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* 8087-only FNENI (enable interrupts); ignored (no-op) here, only the
       device-not-available check is performed. */
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10514
10515
/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* 8087-only FNDISI (disable interrupts); ignored (no-op) here, only the
       device-not-available check is performed. */
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10526
10527
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clear the FPU exception flags in FSW without checking for
       pending exceptions first (no-wait form). */
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10540
10541
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: reinitialize the FPU; deferred to the C implementation with
       fCheckXcpts=false since this is the no-wait form. */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
10549
10550
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    /* 80287-only FNSETPM; ignored (no-op) on later FPUs, only the
       device-not-available check is performed. */
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10561
10562
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    /* 80287XL-only FRSTPM; newer CPUs raise #UD, which is the behavior
       implemented here (the no-op variant is compiled out). */
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10578
10579
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    /* FUCOMI ST0,ST(i): unordered compare setting EFLAGS; deferred to the
       common C worker.  The last argument packs the pop flag (0 = no pop)
       with the FPU opcode word. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10588
10589
10590/** Opcode 0xdb 11/6. */
10591FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
10592{
10593 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
10594 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10595 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
10596 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10597}
10598
10599
/**
 * @opcode 0xdb
 *
 * x87 escape byte 0xdb: register forms are FCMOVNB/NE/NBE/NU, the 0xe0..0xe7
 * administrative group (FNENI..FRSTPM) and FUCOMI/FCOMI; memory forms are the
 * m32i load/store family plus m80r load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (ModR/M + low escape-opcode bits). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* Administrative no-wait instructions, one per bRm value. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return; keeps compilers quiet. */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10651
10652
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Used by the 0xdc register forms (FADD/FMUL/FSUB/FSUBR/FDIV/FDIVR STn,ST0).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(i) is the first operand and destination, ST0 the second operand. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10683
10684
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    /* ST(i) = ST(i) + ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10691
10692
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    /* ST(i) = ST(i) * ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10699
10700
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    /* ST(i) = ST0 - ST(i) (reversed via the fsubr worker). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10707
10708
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    /* ST(i) = ST(i) - ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10715
10716
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    /* ST(i) = ST0 / ST(i) (reversed via the fdivr worker). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10723
10724
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    /* ST(i) = ST(i) / ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10731
10732
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Used by the 0xdc memory forms (FADD/FMUL/... m64r).
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the 64-bit real operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: stack underflow on the destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10767
10768
/** Opcode 0xdc !11/0.
 * FADD m64real - ST0 += [mem64], via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10775
10776
/** Opcode 0xdc !11/1.
 * FMUL m64real - ST0 *= [mem64], via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10783
10784
/** Opcode 0xdc !11/2.
 * FCOM m64real - compare ST0 with [mem64], only FSW is updated (no store). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no register to store a QNaN into, just flag underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10816
10817
/** Opcode 0xdc !11/3.
 * FCOMP m64real - same as FCOM m64real but pops the register stack after
 * the compare (note the _THEN_POP variants below). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10849
10850
/** Opcode 0xdc !11/4.
 * FSUB m64real - ST0 -= [mem64], via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10857
10858
/** Opcode 0xdc !11/5.
 * FSUBR m64real - ST0 = [mem64] - ST0, via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10865
10866
/** Opcode 0xdc !11/6.
 * FDIV m64real - ST0 /= [mem64], via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10873
10874
/** Opcode 0xdc !11/7.
 * FDIVR m64real - ST0 = [mem64] / ST0, via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10881
10882
10883/**
10884 * @opcode 0xdc
10885 */
10886FNIEMOP_DEF(iemOp_EscF4)
10887{
10888 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10889 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10890 if (IEM_IS_MODRM_REG_MODE(bRm))
10891 {
10892 switch (IEM_GET_MODRM_REG_8(bRm))
10893 {
10894 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10895 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10896 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10897 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10898 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10899 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10900 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10901 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10903 }
10904 }
10905 else
10906 {
10907 switch (IEM_GET_MODRM_REG_8(bRm))
10908 {
10909 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10910 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10911 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10912 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10913 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10914 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10915 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10916 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10918 }
10919 }
10920}
10921
10922
/** Opcode 0xdd !11/0.
 * FLD m64real - convert [mem64] to 80-bit and push it onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10953
10954
/** Opcode 0xdd !11/1.
 * FISTTP m64int - store ST0 to [mem64] as int64 with truncation, then pop.
 * On stack underflow the masked-IM response writes the integer indefinite
 * value (INT64_MIN). */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so address faults precede FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10988
10989
/** Opcode 0xdd !11/2.
 * FST m64real - store ST0 to [mem64] as double precision; no pop.
 * On stack underflow with IM masked, a negative QNaN is written instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11023
11024
11025
11026
/** Opcode 0xdd !11/3.
 * FSTP m64real - like FST m64real but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11060
11061
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the full FPU state from memory; deferred to
 * the C implementation (operand size selects the 94- vs 108-byte layout). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11078
11079
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the full FPU state to memory; deferred to the
 * C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11096
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory (no pending-exception
 * check, hence the 'N' prefix). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11120
11121
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark register ST(i) as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11141
11142
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST0 into register ST(i); underflow if ST0 is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* No computation needed; package ST0 as a result with a zero FSW delta. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11165
11166
/** Opcode 0xdd 11/4.
 * FUCOM ST(i) - unordered compare of ST0 with ST(i); no result stored. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11173
11174
/** Opcode 0xdd 11/5.
 * FUCOMP ST(i) - unordered compare of ST0 with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11181
11182
11183/**
11184 * @opcode 0xdd
11185 */
11186FNIEMOP_DEF(iemOp_EscF5)
11187{
11188 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11189 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11190 if (IEM_IS_MODRM_REG_MODE(bRm))
11191 {
11192 switch (IEM_GET_MODRM_REG_8(bRm))
11193 {
11194 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11195 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11196 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11197 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11198 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11199 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11200 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11201 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11203 }
11204 }
11205 else
11206 {
11207 switch (IEM_GET_MODRM_REG_8(bRm))
11208 {
11209 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11210 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11211 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11212 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11213 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11214 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11215 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11216 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11218 }
11219 }
11220}
11221
11222
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add with pop, via the stN,st0 pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11229
11230
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply with pop, via the stN,st0 pop worker. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11237
11238
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST0 with ST1 and pop both registers. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11245
11246
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract with pop, via the stN,st0 pop worker. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11253
11254
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract with pop, via the stN,st0 pop worker. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11261
11262
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse divide with pop, via the stN,st0 pop worker. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11269
11270
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide with pop, via the stN,st0 pop worker. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11277
11278
11279/**
11280 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11281 * the result in ST0.
11282 *
11283 * @param bRm Mod R/M byte.
11284 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11285 */
11286FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11287{
11288 IEM_MC_BEGIN(3, 3);
11289 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11290 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11291 IEM_MC_LOCAL(int16_t, i16Val2);
11292 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11293 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11294 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11295
11296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11297 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11298
11299 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11300 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11301 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11302
11303 IEM_MC_PREPARE_FPU_USAGE();
11304 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11305 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11306 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11307 } IEM_MC_ELSE() {
11308 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11309 } IEM_MC_ENDIF();
11310 IEM_MC_ADVANCE_RIP_AND_FINISH();
11311
11312 IEM_MC_END();
11313}
11314
11315
/** Opcode 0xde !11/0.
 * FIADD m16int - ST0 += (int16)[mem], via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11322
11323
/** Opcode 0xde !11/1.
 * FIMUL m16int - ST0 *= (int16)[mem], via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11330
11331
/** Opcode 0xde !11/2.
 * FICOM m16int - compare ST0 with (int16)[mem]; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11363
11364
/** Opcode 0xde !11/3.
 * FICOMP m16int - like FICOM m16int but pops the stack after the compare. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11396
11397
/** Opcode 0xde !11/4.
 * FISUB m16int - ST0 -= (int16)[mem], via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11404
11405
/** Opcode 0xde !11/5.
 * FISUBR m16int - ST0 = (int16)[mem] - ST0, via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11412
11413
/** Opcode 0xde !11/6.
 * FIDIV m16int - ST0 /= (int16)[mem], via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11420
11421
/** Opcode 0xde !11/7.
 * FIDIVR m16int - ST0 = (int16)[mem] / ST0, via the common st0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11428
11429
11430/**
11431 * @opcode 0xde
11432 */
11433FNIEMOP_DEF(iemOp_EscF6)
11434{
11435 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11436 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
11437 if (IEM_IS_MODRM_REG_MODE(bRm))
11438 {
11439 switch (IEM_GET_MODRM_REG_8(bRm))
11440 {
11441 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
11442 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
11443 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
11444 case 3: if (bRm == 0xd9)
11445 return FNIEMOP_CALL(iemOp_fcompp);
11446 IEMOP_RAISE_INVALID_OPCODE_RET();
11447 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
11448 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
11449 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
11450 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
11451 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11452 }
11453 }
11454 else
11455 {
11456 switch (IEM_GET_MODRM_REG_8(bRm))
11457 {
11458 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
11459 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
11460 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
11461 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
11462 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
11463 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
11464 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
11465 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
11466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11467 }
11468 }
11469}
11470
11471
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: frees
 * ST(i) and then increments the stack TOP pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11491
11492
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in AX (no pending-exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11507
11508
11509/** Opcode 0xdf 11/5. */
11510FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
11511{
11512 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
11513 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11514 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
11515 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11516}
11517
11518
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare setting EFLAGS, then pop (fPop in
 * bit 31 of the combined argument). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11527
11528
/** Opcode 0xdf !11/0.
 * FILD m16int - convert (int16)[mem] to 80-bit real and push it. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11559
11560
/** Opcode 0xdf !11/1.
 * FISTTP m16int - store ST0 to [mem16] as int16 with truncation, then pop.
 * Masked-IM underflow writes the 16-bit integer indefinite (INT16_MIN). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11594
11595
/** Opcode 0xdf !11/2.
 * FIST m16int - store ST0 to [mem16] as int16 using the current rounding
 * mode; no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11629
11630
/** Opcode 0xdf !11/3.
 *
 * FISTP m16i: store ST(0) to a signed 16-bit integer in memory and pop the
 * register stack.  Identical to iemOp_fist_m16i apart from the _THEN_POP
 * FSW/underflow updaters.
 */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Legacy IEM_MC_MEM_MAP form; conversion to IEM_MC_MEM_MAP_XXX pending. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* Pop after the store. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: write integer indefinite only if FCW.IM is set. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11664
11665
/** Opcode 0xdf !11/4.
 *
 * FBLD m80bcd: load an 80-bit packed BCD value from memory, convert it to
 * extended real and push it onto the FPU register stack.
 */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,               d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,     pd80Val,    d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the packed BCD operand before touching the FPU stack. */
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push succeeds only if ST(7) is free; otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11696
11697
/** Opcode 0xdf !11/5.
 *
 * FILD m64i: load a signed 64-bit integer from memory, convert it to
 * extended real and push it onto the FPU register stack.
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int64_t,                 i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,    i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the source integer before touching the FPU stack. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push succeeds only if ST(7) is free; otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11728
11729
/** Opcode 0xdf !11/6.
 *
 * FBSTP m80bcd: store ST(0) as an 80-bit packed BCD value in memory and pop
 * the register stack.
 */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U,                pd80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte destination; the explicit 7-byte alignment spec avoids the
       default natural-alignment check for this odd-sized operand. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: write the BCD indefinite value only if FCW.IM is set. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11763
11764
/** Opcode 0xdf !11/7.
 *
 * FISTP m64i: store ST(0) to a signed 64-bit integer in memory and pop the
 * register stack.  Same structure as the 16-bit variant iemOp_fistp_m16i.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Legacy IEM_MC_MEM_MAP form; conversion to IEM_MC_MEM_MAP_XXX pending. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: write integer indefinite only if FCW.IM is set. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11798
11799
/**
 * @opcode  0xdf
 *
 * FPU escape 0xdf: reads the ModR/M byte, records the FPU opcode word (FOP),
 * and dispatches to the per-encoding worker.  Register forms (mod=3) and
 * memory forms use separate /reg tables.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP word: low byte is the ModR/M byte, upper bits the escape's low 3 bits. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,  bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN,  bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,  bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)                             /* Only DF E0 is FNSTSW AX; other /4 encodings are invalid. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN,  bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: 16-bit/64-bit integer and packed BCD loads/stores. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11840
11841
/**
 * @opcode  0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement the counter register (CX/ECX/RCX, selected by
 * the effective address size) and take the short branch if the counter is
 * non-zero AND ZF is clear.  The decrement does not modify EFLAGS.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11892
11893
/**
 * @opcode  0xe1
 *
 * LOOPE/LOOPZ Jb: decrement the counter register (CX/ECX/RCX, selected by the
 * effective address size) and take the short branch if the counter is
 * non-zero AND ZF is set.  The decrement does not modify EFLAGS.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11944
11945
/**
 * @opcode  0xe2
 *
 * LOOP Jb: decrement the counter register (CX/ECX/RCX, selected by the
 * effective address size) and take the short branch while the counter is
 * non-zero.  The decrement does not modify EFLAGS.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Detect the self-branching LOOP $-2 stall idiom (target == own start) and,
       when verbose logging is on, terminate it immediately by zeroing the counter. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Regular decrement-and-branch implementation. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12036
12037
/**
 * @opcode  0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the short branch if the counter register
 * (CX/ECX/RCX, selected by the effective address size) is zero.  Unlike the
 * LOOP family the counter is only tested, never modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Inverted test: non-zero falls through, zero takes the branch. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12085
12086
/** Opcode 0xe4.
 *
 * IN AL,Ib: read one byte from the immediate port into AL.  Deferred to the
 * C implementation; the 0x80 flag tells iemCImpl_in the port is an immediate.
 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12096
12097
/** Opcode 0xe5.
 *
 * IN eAX,Ib: read a word/dword (by effective operand size) from the immediate
 * port into AX/EAX.  Deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12108
12109
/** Opcode 0xe6.
 *
 * OUT Ib,AL: write AL to the immediate port.  Deferred to the C
 * implementation; the 0x80 flag marks the immediate-port form.
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12119
12120
/** Opcode 0xe7.
 *
 * OUT Ib,eAX: write AX/EAX (by effective operand size) to the immediate
 * port.  Deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12131
12132
/**
 * @opcode  0xe8
 *
 * CALL rel16/rel32: near relative call.  The immediate is sign-extended; in
 * 64-bit mode the displacement is still fetched as a signed 32-bit value and
 * sign-extended to 64 bits.  Deferred to the C implementations.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* rel32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12163
12164
/**
 * @opcode  0xe9
 *
 * JMP rel16/rel32: near relative jump.  The 32-bit and 64-bit operand sizes
 * share the rel32 path (64-bit mode also uses a 32-bit displacement).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12198
12199
/**
 * @opcode  0xea
 *
 * JMP ptr16:16 / ptr16:32: direct far jump.  Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT).  The offset and selector are decoded here and the
 * actual mode/privilege work is done by iemCImpl_FarJmp.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);           /* ptr16:32 form */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);    /* ptr16:16 form, zero-extended */
    uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
12220
12221
/**
 * @opcode  0xeb
 *
 * JMP rel8: short relative jump, unconditional.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
12236
12237
/** Opcode 0xec.
 *
 * IN AL,DX: read one byte from the port in DX into AL.  Deferred to the C
 * implementation (no immediate flag, port comes from DX).
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12246
12247
/** Opcode 0xed.
 *
 * IN eAX,DX: read a word/dword (by effective operand size) from the port in
 * DX into AX/EAX.  Deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12257
12258
/** Opcode 0xee.
 *
 * OUT DX,AL: write AL to the port in DX.  Deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12267
12268
/** Opcode 0xef.
 *
 * OUT DX,eAX: write AX/EAX (by effective operand size) to the port in DX.
 * Deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12278
12279
/**
 * @opcode  0xf0
 *
 * LOCK prefix: records the prefix (unless the disregard-lock execution mode
 * is active) and continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Recurse into the one-byte map for the real opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12292
12293
/**
 * @opcode  0xf1
 *
 * INT1/ICEBP: raises \#DB via the common software-interrupt C implementation
 * (IEMINT_INT1 distinguishes it from a regular INT n).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
12309
12310
/**
 * @opcode  0xf2
 *
 * REPNE/REPNZ prefix: records the prefix (clearing a conflicting REPZ),
 * selects prefix-table index 3 for the 4-entry opcode tables, and continues
 * decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12328
12329
/**
 * @opcode  0xf3
 *
 * REP/REPE/REPZ prefix: records the prefix (clearing a conflicting REPNZ),
 * selects prefix-table index 2 for the 4-entry opcode tables, and continues
 * decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12347
12348
/**
 * @opcode  0xf4
 *
 * HLT: halt the CPU.  Deferred to the C implementation; ends the current
 * translation block (IEM_CIMPL_F_END_TB).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
}
12358
12359
/**
 * @opcode  0xf5
 *
 * CMC: complement the carry flag; no other flags are touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12372
12373
/**
 * Body for 'inc/dec/not/neg Eb'.
 *
 * Handles both the register form and the memory form (plain and LOCK
 * prefixed); the caller supplies the normal and locked 8-bit workers.  The
 * external behaviour (decoding, flag handling, RIP advance) is identical for
 * both memory variants; only the worker differs.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            /* Plain (non-atomic) read-modify-write. */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefixed: same sequence, but using the locked worker. */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
12434
12435
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles the register form and the non-LOCK memory form for all three
 * operand sizes.  NOTE: this macro is deliberately unbalanced -- it ends
 * inside the LOCK-prefix 'else' branch, which the caller MUST close by
 * following it with IEMOP_BODY_UNARY_Ev_LOCKED (supplying the locked
 * workers).
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half. */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* Intentionally left open: closed by IEMOP_BODY_UNARY_Ev_LOCKED. */ \
            (void)0
12559
/* Completes IEMOP_BODY_UNARY_Ev: the LOCK-prefixed memory variant.  Supplies
   the locked workers and closes the branches left open by the macro above. */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
12625
12626
12627/**
12628 * @opmaps grp3_f6
12629 * @opcode /0
12630 * @todo also /1
12631 */
12632FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
12633{
12634 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
12635 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12636
12637 if (IEM_IS_MODRM_REG_MODE(bRm))
12638 {
12639 /* register access */
12640 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12641 IEM_MC_BEGIN(3, 0);
12642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12643 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12644 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
12645 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12646 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12647 IEM_MC_REF_EFLAGS(pEFlags);
12648 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
12649 IEM_MC_ADVANCE_RIP_AND_FINISH();
12650 IEM_MC_END();
12651 }
12652 else
12653 {
12654 /* memory access. */
12655 IEM_MC_BEGIN(3, 3);
12656 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
12657 IEM_MC_ARG(uint8_t, u8Src, 1);
12658 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
12659 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12660 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
12661
12662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12663 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12664 IEM_MC_ASSIGN(u8Src, u8Imm);
12665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12666 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12667 IEM_MC_FETCH_EFLAGS(EFlags);
12668 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
12669
12670 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
12671 IEM_MC_COMMIT_EFLAGS(EFlags);
12672 IEM_MC_ADVANCE_RIP_AND_FINISH();
12673 IEM_MC_END();
12674 }
12675}
12676
12677
/**
 * Common body for opcode 0xf6 /4, /5, /6 and /7 (mul, imul, div and idiv
 * with a byte operand).
 *
 * @param   bRm     The ModRM byte, selecting the register or memory source.
 * @param   pfnU8   The 8-bit assembly worker; receives AX by reference, the
 *                  source byte and EFLAGS.  A zero return means success, any
 *                  other value causes \#DE (divide error) to be raised.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* non-zero rc from the worker => #DE */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* non-zero rc from the worker => #DE */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12728
12729
/**
 * Common body for opcode 0xf7 /4, /5, /6 and /7 (mul, imul, div and idiv
 * with a word/dword/qword operand).
 *
 * The workers operate on the xAX/xDX register pair.  A zero return value
 * means success; any non-zero return raises \#DE (divide error).  For the
 * 32-bit forms the high halves of RAX/RDX are explicitly cleared on success.
 *
 * @param   bRm     The ModRM byte, selecting the register or memory operand.
 * @param   pImpl   Table with the operand-size specific worker functions.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero the upper dword; done by hand here
                       since the worker only got references to the low dwords. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* See the register path: clear the high dwords by hand. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12912
12913
12914/**
12915 * @opmaps grp3_f6
12916 * @opcode /2
12917 */
12918FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
12919{
12920 IEMOP_MNEMONIC(not_Eb, "not Eb");
12921 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
12922}
12923
12924
12925/**
12926 * @opmaps grp3_f6
12927 * @opcode /3
12928 */
12929FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12930{
12931 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12932 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12933}
12934
12935
12936/**
12937 * @opcode 0xf6
12938 */
12939FNIEMOP_DEF(iemOp_Grp3_Eb)
12940{
12941 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12942 switch (IEM_GET_MODRM_REG_8(bRm))
12943 {
12944 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12945 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12946 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
12947 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
12948 case 4:
12949 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
12950 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12951 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
12952 case 5:
12953 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
12954 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12955 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
12956 case 6:
12957 IEMOP_MNEMONIC(div_Eb, "div Eb");
12958 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12959 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
12960 case 7:
12961 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
12962 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12963 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
12964 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12965 }
12966}
12967
12968
/**
 * Opcode 0xf7 /0.
 *
 * TEST Ev,Iv - ANDs the operand with the immediate, updates EFLAGS and
 * discards the result; the destination is only read.  In 64-bit mode the
 * immediate is a sign-extended 32-bit value.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                /* Sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                /* 2 = number of immediate bytes following the ModRM encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* TEST only reads the destination, so map it read-only. */
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 3);
                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                /* Still 4 immediate bytes: the qword form uses a sign-extended imm32. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13113
13114
/**
 * Opcode 0xf7 /2.
 *
 * NOT Ev - one's complement negation; does not affect any flags.  The two
 * body macros form one statement: the first handles the register and plain
 * memory forms, the second the LOCK prefixed memory form.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13122
13123
/**
 * Opcode 0xf7 /3.
 *
 * NEG Ev - two's complement negation.  The two body macros form one
 * statement: the first handles the register and plain memory forms, the
 * second the LOCK prefixed memory form.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13131
13132
13133/**
13134 * @opcode 0xf7
13135 */
13136FNIEMOP_DEF(iemOp_Grp3_Ev)
13137{
13138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13139 switch (IEM_GET_MODRM_REG_8(bRm))
13140 {
13141 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13142 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13143 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
13144 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
13145 case 4:
13146 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
13147 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13148 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
13149 case 5:
13150 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
13151 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13152 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
13153 case 6:
13154 IEMOP_MNEMONIC(div_Ev, "div Ev");
13155 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13156 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
13157 case 7:
13158 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
13159 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13160 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
13161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13162 }
13163}
13164
13165
13166/**
13167 * @opcode 0xf8
13168 */
13169FNIEMOP_DEF(iemOp_clc)
13170{
13171 IEMOP_MNEMONIC(clc, "clc");
13172 IEM_MC_BEGIN(0, 0);
13173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13174 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
13175 IEM_MC_ADVANCE_RIP_AND_FINISH();
13176 IEM_MC_END();
13177}
13178
13179
13180/**
13181 * @opcode 0xf9
13182 */
13183FNIEMOP_DEF(iemOp_stc)
13184{
13185 IEMOP_MNEMONIC(stc, "stc");
13186 IEM_MC_BEGIN(0, 0);
13187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13188 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
13189 IEM_MC_ADVANCE_RIP_AND_FINISH();
13190 IEM_MC_END();
13191}
13192
13193
13194/**
13195 * @opcode 0xfa
13196 */
13197FNIEMOP_DEF(iemOp_cli)
13198{
13199 IEMOP_MNEMONIC(cli, "cli");
13200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13201 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
13202}
13203
13204
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Deferred to a C implementation; interrupts
 * are re-checked *after* the instruction (presumably to honour the STI
 * interrupt shadow - TODO confirm).
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
13211
13212
13213/**
13214 * @opcode 0xfc
13215 */
13216FNIEMOP_DEF(iemOp_cld)
13217{
13218 IEMOP_MNEMONIC(cld, "cld");
13219 IEM_MC_BEGIN(0, 0);
13220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13221 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
13222 IEM_MC_ADVANCE_RIP_AND_FINISH();
13223 IEM_MC_END();
13224}
13225
13226
13227/**
13228 * @opcode 0xfd
13229 */
13230FNIEMOP_DEF(iemOp_std)
13231{
13232 IEMOP_MNEMONIC(std, "std");
13233 IEM_MC_BEGIN(0, 0);
13234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13235 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
13236 IEM_MC_ADVANCE_RIP_AND_FINISH();
13237 IEM_MC_END();
13238}
13239
13240
13241/**
13242 * @opmaps grp4
13243 * @opcode /0
13244 */
13245FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
13246{
13247 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
13248 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
13249}
13250
13251
13252/**
13253 * @opmaps grp4
13254 * @opcode /1
13255 */
13256FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
13257{
13258 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
13259 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
13260}
13261
13262
13263/**
13264 * @opcode 0xfe
13265 */
13266FNIEMOP_DEF(iemOp_Grp4)
13267{
13268 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13269 switch (IEM_GET_MODRM_REG_8(bRm))
13270 {
13271 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
13272 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
13273 default:
13274 /** @todo is the eff-addr decoded? */
13275 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
13276 IEMOP_RAISE_INVALID_OPCODE_RET();
13277 }
13278}
13279
/**
 * Opcode 0xff /0.
 *
 * INC Ev.  The two body macros form one statement: the first handles the
 * register and plain memory forms, the second the LOCK prefixed memory form.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13287
13288
/**
 * Opcode 0xff /1.
 *
 * DEC Ev.  The two body macros form one statement: the first handles the
 * register and plain memory forms, the second the LOCK prefixed memory form.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13296
13297
13298/**
13299 * Opcode 0xff /2.
13300 * @param bRm The RM byte.
13301 */
13302FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
13303{
13304 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
13305 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13306
13307 if (IEM_IS_MODRM_REG_MODE(bRm))
13308 {
13309 /* The new RIP is taken from a register. */
13310 switch (pVCpu->iem.s.enmEffOpSize)
13311 {
13312 case IEMMODE_16BIT:
13313 IEM_MC_BEGIN(1, 0);
13314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13315 IEM_MC_ARG(uint16_t, u16Target, 0);
13316 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13317 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
13318 IEM_MC_END();
13319 break;
13320
13321 case IEMMODE_32BIT:
13322 IEM_MC_BEGIN(1, 0);
13323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13324 IEM_MC_ARG(uint32_t, u32Target, 0);
13325 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13326 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
13327 IEM_MC_END();
13328 break;
13329
13330 case IEMMODE_64BIT:
13331 IEM_MC_BEGIN(1, 0);
13332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13333 IEM_MC_ARG(uint64_t, u64Target, 0);
13334 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13335 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
13336 IEM_MC_END();
13337 break;
13338
13339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13340 }
13341 }
13342 else
13343 {
13344 /* The new RIP is taken from a register. */
13345 switch (pVCpu->iem.s.enmEffOpSize)
13346 {
13347 case IEMMODE_16BIT:
13348 IEM_MC_BEGIN(1, 1);
13349 IEM_MC_ARG(uint16_t, u16Target, 0);
13350 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13352 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13353 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13354 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
13355 IEM_MC_END();
13356 break;
13357
13358 case IEMMODE_32BIT:
13359 IEM_MC_BEGIN(1, 1);
13360 IEM_MC_ARG(uint32_t, u32Target, 0);
13361 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13362 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13364 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13365 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
13366 IEM_MC_END();
13367 break;
13368
13369 case IEMMODE_64BIT:
13370 IEM_MC_BEGIN(1, 1);
13371 IEM_MC_ARG(uint64_t, u64Target, 0);
13372 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13373 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13374 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13375 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13376 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
13377 IEM_MC_END();
13378 break;
13379
13380 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13381 }
13382 }
13383}
13384
/*
 * Body for grp5 far call/jmp (0xff /3 and /5): loads a far pointer
 * (selector:offset) from memory and defers to the given C implementation.
 * Register operands are invalid and raise \#UD.  In 64-bit mode the default
 * operand size is 32-bit; only Intel honours a REX.W prefix for a 64-bit
 * offset (AMD asserted below).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory: offset first, then the selector at
       the following offset (2/4/8 bytes in, depending on operand size). */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13452
13453
13454/**
13455 * Opcode 0xff /3.
13456 * @param bRm The RM byte.
13457 */
13458FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
13459{
13460 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
13461 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
13462}
13463
13464
13465/**
13466 * Opcode 0xff /4.
13467 * @param bRm The RM byte.
13468 */
13469FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
13470{
13471 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
13472 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13473
13474 if (IEM_IS_MODRM_REG_MODE(bRm))
13475 {
13476 /* The new RIP is taken from a register. */
13477 switch (pVCpu->iem.s.enmEffOpSize)
13478 {
13479 case IEMMODE_16BIT:
13480 IEM_MC_BEGIN(0, 1);
13481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13482 IEM_MC_LOCAL(uint16_t, u16Target);
13483 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13484 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13485 IEM_MC_END();
13486 break;
13487
13488 case IEMMODE_32BIT:
13489 IEM_MC_BEGIN(0, 1);
13490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13491 IEM_MC_LOCAL(uint32_t, u32Target);
13492 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13493 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13494 IEM_MC_END();
13495 break;
13496
13497 case IEMMODE_64BIT:
13498 IEM_MC_BEGIN(0, 1);
13499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13500 IEM_MC_LOCAL(uint64_t, u64Target);
13501 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
13502 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13503 IEM_MC_END();
13504 break;
13505
13506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13507 }
13508 }
13509 else
13510 {
13511 /* The new RIP is taken from a memory location. */
13512 switch (pVCpu->iem.s.enmEffOpSize)
13513 {
13514 case IEMMODE_16BIT:
13515 IEM_MC_BEGIN(0, 2);
13516 IEM_MC_LOCAL(uint16_t, u16Target);
13517 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13518 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13520 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13521 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
13522 IEM_MC_END();
13523 break;
13524
13525 case IEMMODE_32BIT:
13526 IEM_MC_BEGIN(0, 2);
13527 IEM_MC_LOCAL(uint32_t, u32Target);
13528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13531 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13532 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
13533 IEM_MC_END();
13534 break;
13535
13536 case IEMMODE_64BIT:
13537 IEM_MC_BEGIN(0, 2);
13538 IEM_MC_LOCAL(uint64_t, u64Target);
13539 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
13540 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
13541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13542 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
13543 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
13544 IEM_MC_END();
13545 break;
13546
13547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13548 }
13549 }
13550}
13551
13552
13553/**
13554 * Opcode 0xff /5.
13555 * @param bRm The RM byte.
13556 */
13557FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
13558{
13559 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
13560 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
13561}
13562
13563
/**
 * Opcode 0xff /6.
 *
 * Push of a 16/32/64-bit general purpose register or memory operand
 * ("push Ev").  The register forms go through the common push worker; only
 * the memory forms are emitted inline here, one microcode block per
 * effective operand size.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    /* NOTE(review): presumably selects the 64-bit default operand size in
       64-bit mode, as for the other push/pop encodings - confirm against the
       macro definition. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* Fetch the word from [iEffSeg:GCPtrEffSrc] and push it.  The
               decoding is completed (LOCK prefix rejected) only after the
               effective address has been calculated. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* Same structure as the 16-bit case, dword sized. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* Same structure as the 16-bit case, qword sized. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13619
13620
13621/**
13622 * @opcode 0xff
13623 */
13624FNIEMOP_DEF(iemOp_Grp5)
13625{
13626 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13627 switch (IEM_GET_MODRM_REG_8(bRm))
13628 {
13629 case 0:
13630 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13631 case 1:
13632 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13633 case 2:
13634 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13635 case 3:
13636 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13637 case 4:
13638 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13639 case 5:
13640 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13641 case 6:
13642 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13643 case 7:
13644 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13645 IEMOP_RAISE_INVALID_OPCODE_RET();
13646 }
13647 AssertFailedReturn(VERR_IEM_IPE_3);
13648}
13649
13650
13651
/**
 * The one byte opcode dispatch table, indexed by the opcode byte.
 *
 * Declared @c extern near the top of this file so it can be referenced before
 * this point (e.g. by two-byte escape handling).  Each slot must stay at its
 * opcode-valued index; the hex comments mark every fourth entry.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
13719
13720
13721/** @} */
13722
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette