VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 101958

Last change on this file since 101958 was 101958, checked in by vboxsync, 17 months ago

VMM/IEM: Flush PC and other shadow guest register copies when emitting CIMPL calls in the native recompiler. bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 490.6 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 101958 2023-11-08 10:54:58Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
/** One-byte opcode dispatch table (one entry per opcode byte); defined later
 *  in this file, declared extern here so it can be referenced beforehand. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Emits the register-destination form and the non-locked memory-destination
 * form, then stops inside an open 'else {' (LOCK-prefixed memory case).
 * Therefore it MUST be completed by IEMOP_BODY_BINARY_rm_r8_NO_LOCK or
 * IEMOP_BODY_BINARY_rm_r8_LOCKED, which close the braces.
 *
 * @param a_fnNormalU8  Assembly-level worker: (uint8_t *pu8Dst, uint8_t u8Src,
 *                      uint32_t *pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands.
 *
 * Same structure as IEMOP_BODY_BINARY_rm_r8_RW, but the memory operand is
 * mapped read-only (const pu8Dst, _RO map/unmap) since these instructions only
 * update EFLAGS.  Like the RW variant it stops inside an open 'else {' and
 * must be completed by IEMOP_BODY_BINARY_rm_r8_NO_LOCK or
 * IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * @param a_fnNormalU8  Assembly-level worker: (uint8_t const *pu8Dst,
 *                      uint8_t u8Src, uint32_t *pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,      0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
178
/**
 * Completion for IEMOP_BODY_BINARY_rm_r8_RW / _RO when a LOCK prefix is not
 * allowed: raises \#UD for the LOCK-prefixed memory case and closes the two
 * braces the body macro left open.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Completion for IEMOP_BODY_BINARY_rm_r8_RW when a LOCK prefix is allowed:
 * emits the locked memory-destination form and closes the two braces the body
 * macro left open.
 *
 * @param a_fnLockedU8  Assembly-level locked worker: (uint8_t *pu8Dst,
 *                      uint8_t u8Src, uint32_t *pEFlags).
 *
 * @note Renamed the local bMapInfoDst to bUnmapInfo for consistency with the
 *       sibling body macros in this file (name is private to the expansion).
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * Self-contained (closes all its braces); the source operand comes from the
 * r/m field (register or memory), the destination is always the reg field.
 * LOCK is rejected in both paths via IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX.
 *
 * @param a_fnNormalU8  Assembly-level worker: (uint8_t *pu8Dst, uint8_t u8Src,
 *                      uint32_t *pEFlags).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Switches on the effective operand size in both the register and the
 * non-locked memory paths, then stops inside an open 'else {' (LOCK-prefixed
 * memory case).  Must be completed by IEMOP_BODY_BINARY_rm_rv_LOCKED.
 *
 * @param a_fnNormalU16  16-bit worker (uint16_t *, uint16_t, uint32_t *).
 * @param a_fnNormalU32  32-bit worker (uint32_t *, uint32_t, uint32_t *).
 * @param a_fnNormalU64  64-bit worker (uint64_t *, uint64_t, uint32_t *).
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* 32-bit ops writing a GPR clear the upper half of the 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Completion for IEMOP_BODY_BINARY_rm_rv_RW: emits the LOCK-prefixed
 * memory-destination forms (one per operand size) and closes the two braces
 * the body macro left open.
 *
 * @param a_fnLockedU16  16-bit locked worker.
 * @param a_fnLockedU32  32-bit locked worker.
 * @param a_fnLockedU64  64-bit locked worker.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * Self-contained (closes all braces): the memory operand is mapped read-only
 * and a LOCK prefix raises \#UD, since these instructions only update EFLAGS.
 *
 * @param a_fnNormalU16  16-bit worker (uint16_t const *, uint16_t, uint32_t *).
 * @param a_fnNormalU32  32-bit worker (uint32_t const *, uint32_t, uint32_t *).
 * @param a_fnNormalU64  64-bit worker (uint64_t const *, uint64_t, uint32_t *).
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,    0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,    0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,    0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param a_fnNormalU8  Assembly-level worker: (uint8_t *pu8Dst, uint8_t u8Src,
 *                      uint32_t *pEFlags).
 * @note  Ends with IEM_MC_END() and no semicolon; the invoking statement
 *        supplies the terminating ';'.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the immediate (Iz) is a sign-extended 32-bit value, hence
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 below.
 *
 * @param a_fnNormalU16       16-bit worker.
 * @param a_fnNormalU32       32-bit worker.
 * @param a_fnNormalU64       64-bit worker.
 * @param a_fModifiesDstReg   Non-zero when the op writes rAX; gates the
 *                            upper-half clearing in the 32-bit case (pass 0
 *                            for flags-only ops like TEST/CMP).
 * @note  The case bodies end with IEM_MC_END() and no 'break'; presumably
 *        IEM_MC_END() terminates the block (e.g. via return) — verify against
 *        its definition before restructuring.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,              0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,  1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,              2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,              0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,  1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,              2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            /* Only ops that actually write EAX clear the upper half of RAX. */ \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,              0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,  1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,              2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
/**
 * @opcode      0x00
 * @opmnemonic  add
 * @op1         rm:Eb
 * @op2         reg:Gb
 * @opmaps      one
 * @openc       ModR/M
 * @opflmodify  cf,pf,af,zf,sf,of
 * @ophints     harmless ignores_op_sizes
 * @opstats     add_Eb_Gb
 * @opgroup     og_gen_arith_bin
 * @optest      op1=1   op2=1   -> op1=2   efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf op1=1   op2=2   -> op1=3   efl&|=nc,po,na,nz,pl,nv
 * @optest      op1=254 op2=1   -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest      op1=128 op2=128 -> op1=0   efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Register + non-locked memory forms, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
742
743
/**
 * @opcode      0x01
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest      op1=1  op2=1  -> op1=2  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest      op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Register + non-locked memory forms, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
759
760
/**
 * @opcode      0x02
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
772
773
/**
 * @opcode      0x03
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file (not in this chunk). */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
}
785
786
/**
 * @opcode      0x04
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
798
799
/**
 * @opcode      0x05
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest      op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
 * @optest      efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest      op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Last argument 1: ADD writes rAX, so EAX writes clear the upper half. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
814
815
/**
 * @opcode      0x06
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* PUSH ES is invalid in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
826
827
/**
 * @opcode      0x07
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* POP ES is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to a C implementation; loading a segment register can change execution mode. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
839
840
/**
 * @opcode      0x08
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      op1=7 op2=12 -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest      op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest      op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
859
860
/**
 * @opcode      0x09
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest      op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
881
882
/**
 * @opcode      0x0a
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
897
898
/**
 * @opcode      0x0b
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
}
913
914
/**
 * @opcode      0x0c
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is architecturally undefined after OR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
929
930
931/**
932 * @opcode 0x0d
933 * @opgroup og_gen_arith_bin
934 * @opflmodify cf,pf,af,zf,sf,of
935 * @opflundef af
936 * @opflclear of,cf
937 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
938 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
939 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
940 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
941 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
942 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
943 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
944 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, immz (operand-size dependent immediate). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by OR. */
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
951
952
953/**
954 * @opcode 0x0e
955 * @opgroup og_stack_sreg
956 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode, hence IEMOP_HLP_NO_64BIT. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    /* Shared segment-register push worker; takes the X86_SREG_XXX index. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
965/**
966 * @opcode 0x0f
967 * @opmnemonic EscTwo0f
968 * @openc two0f
969 * @opdisenum OP_2B_ESC
970 * @ophints harmless
971 * @opgroup og_escapes
972 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The two-byte map has 4 entries per opcode, selected by the current prefix index. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this further.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
1009/**
1010 * @opcode 0x10
1011 * @opgroup og_gen_arith_bin
1012 * @opfltest cf
1013 * @opflmodify cf,pf,af,zf,sf,of
1014 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1015 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1016 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1017 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1019 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8 (memory/register destination; LOCK allowed on the memory form). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
1028/**
1029 * @opcode 0x11
1030 * @opgroup og_gen_arith_bin
1031 * @opfltest cf
1032 * @opflmodify cf,pf,af,zf,sf,of
1033 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1036 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1037 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1038 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64 (memory/register destination; LOCK allowed on the memory form). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
1047/**
1048 * @opcode 0x12
1049 * @opgroup og_gen_arith_bin
1050 * @opfltest cf
1051 * @opflmodify cf,pf,af,zf,sf,of
1052 * @opcopytests iemOp_adc_Eb_Gb
1053 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8 (register destination form). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1059
1060
1061/**
1062 * @opcode 0x13
1063 * @opgroup og_gen_arith_bin
1064 * @opfltest cf
1065 * @opflmodify cf,pf,af,zf,sf,of
1066 * @opcopytests iemOp_adc_Ev_Gv
1067 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64 (register destination form). */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1073
1074
1075/**
1076 * @opcode 0x14
1077 * @opgroup og_gen_arith_bin
1078 * @opfltest cf
1079 * @opflmodify cf,pf,af,zf,sf,of
1080 * @opcopytests iemOp_adc_Eb_Gb
1081 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
1089/**
1090 * @opcode 0x15
1091 * @opgroup og_gen_arith_bin
1092 * @opfltest cf
1093 * @opflmodify cf,pf,af,zf,sf,of
1094 * @opcopytests iemOp_adc_Ev_Gv
1095 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, immz (operand-size dependent immediate). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
1103/**
1104 * @opcode 0x16
1105 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode, hence IEMOP_HLP_NO_64BIT. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    /* Shared segment-register push worker; takes the X86_SREG_XXX index. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1112
1113
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode.  DISOPTYPE_INHIBIT_IRQS /
       IEM_CIMPL_F_INHIBIT_SHADOW: loading SS sets up an interrupt shadow
       over the following instruction. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Deferred to a C implementation; IEM_CIMPL_F_MODE because a segment reload can change mode. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1128
1129
1130/**
1131 * @opcode 0x18
1132 * @opgroup og_gen_arith_bin
1133 * @opfltest cf
1134 * @opflmodify cf,pf,af,zf,sf,of
1135 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8 (memory/register destination; LOCK allowed on the memory form). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1142
1143
1144/**
1145 * @opcode 0x19
1146 * @opgroup og_gen_arith_bin
1147 * @opfltest cf
1148 * @opflmodify cf,pf,af,zf,sf,of
1149 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64 (memory/register destination; LOCK allowed on the memory form). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1156
1157
1158/**
1159 * @opcode 0x1a
1160 * @opgroup og_gen_arith_bin
1161 * @opfltest cf
1162 * @opflmodify cf,pf,af,zf,sf,of
1163 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8 (register destination form). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1169
1170
1171/**
1172 * @opcode 0x1b
1173 * @opgroup og_gen_arith_bin
1174 * @opfltest cf
1175 * @opflmodify cf,pf,af,zf,sf,of
1176 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 (register destination form). */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1182
1183
1184/**
1185 * @opcode 0x1c
1186 * @opgroup og_gen_arith_bin
1187 * @opfltest cf
1188 * @opflmodify cf,pf,af,zf,sf,of
1189 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1195
1196
1197/**
1198 * @opcode 0x1d
1199 * @opgroup og_gen_arith_bin
1200 * @opfltest cf
1201 * @opflmodify cf,pf,af,zf,sf,of
1202 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, immz (operand-size dependent immediate). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1208
1209
1210/**
1211 * @opcode 0x1e
1212 * @opgroup og_stack_sreg
1213 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode, hence IEMOP_HLP_NO_64BIT. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    /* Shared segment-register push worker; takes the X86_SREG_XXX index. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1220
1221
1222/**
1223 * @opcode 0x1f
1224 * @opgroup og_stack_sreg
1225 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Deferred to a C implementation; IEM_CIMPL_F_MODE because a segment reload can change mode. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1233
1234
1235/**
1236 * @opcode 0x20
1237 * @opgroup og_gen_arith_bin
1238 * @opflmodify cf,pf,af,zf,sf,of
1239 * @opflundef af
1240 * @opflclear of,cf
1241 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 (memory/register destination; LOCK allowed on the memory form). */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1249
1250
1251/**
1252 * @opcode 0x21
1253 * @opgroup og_gen_arith_bin
1254 * @opflmodify cf,pf,af,zf,sf,of
1255 * @opflundef af
1256 * @opflclear of,cf
1257 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64 (memory/register destination; LOCK allowed on the memory form). */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1265
1266
1267/**
1268 * @opcode 0x22
1269 * @opgroup og_gen_arith_bin
1270 * @opflmodify cf,pf,af,zf,sf,of
1271 * @opflundef af
1272 * @opflclear of,cf
1273 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 (register destination form). */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1280
1281
1282/**
1283 * @opcode 0x23
1284 * @opgroup og_gen_arith_bin
1285 * @opflmodify cf,pf,af,zf,sf,of
1286 * @opflundef af
1287 * @opflclear of,cf
1288 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 (register destination form). */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1295
1296
1297/**
1298 * @opcode 0x24
1299 * @opgroup og_gen_arith_bin
1300 * @opflmodify cf,pf,af,zf,sf,of
1301 * @opflundef af
1302 * @opflclear of,cf
1303 */
1304FNIEMOP_DEF(iemOp_and_Al_Ib)
1305{
1306 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1307 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1308 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1309}
1310
1311
1312/**
1313 * @opcode 0x25
1314 * @opgroup og_gen_arith_bin
1315 * @opflmodify cf,pf,af,zf,sf,of
1316 * @opflundef af
1317 * @opflclear of,cf
1318 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, immz (operand-size dependent immediate). */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1325
1326
1327/**
1328 * @opcode 0x26
1329 * @opmnemonic SEG
1330 * @op1 ES
1331 * @opgroup og_prefix
1332 * @openc prefix
1333 * @opdisenum OP_SEG
1334 * @ophints harmless
1335 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the prefix/effective segment and
       decode the next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1345
1346
1347/**
1348 * @opcode 0x27
1349 * @opfltest af,cf
1350 * @opflmodify cf,pf,af,zf,sf,of
1351 * @opflundef of
1352 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is left undefined by DAA. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1361
1362
1363/**
1364 * @opcode 0x28
1365 * @opgroup og_gen_arith_bin
1366 * @opflmodify cf,pf,af,zf,sf,of
1367 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 (memory/register destination; LOCK allowed on the memory form). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1374
1375
1376/**
1377 * @opcode 0x29
1378 * @opgroup og_gen_arith_bin
1379 * @opflmodify cf,pf,af,zf,sf,of
1380 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64 (memory/register destination; LOCK allowed on the memory form). */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1387
1388
1389/**
1390 * @opcode 0x2a
1391 * @opgroup og_gen_arith_bin
1392 * @opflmodify cf,pf,af,zf,sf,of
1393 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 (register destination form). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1399
1400
1401/**
1402 * @opcode 0x2b
1403 * @opgroup og_gen_arith_bin
1404 * @opflmodify cf,pf,af,zf,sf,of
1405 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 (register destination form). */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1411
1412
1413/**
1414 * @opcode 0x2c
1415 * @opgroup og_gen_arith_bin
1416 * @opflmodify cf,pf,af,zf,sf,of
1417 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1423
1424
1425/**
1426 * @opcode 0x2d
1427 * @opgroup og_gen_arith_bin
1428 * @opflmodify cf,pf,af,zf,sf,of
1429 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, immz (operand-size dependent immediate). */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1435
1436
1437/**
1438 * @opcode 0x2e
1439 * @opmnemonic SEG
1440 * @op1 CS
1441 * @opgroup og_prefix
1442 * @openc prefix
1443 * @opdisenum OP_SEG
1444 * @ophints harmless
1445 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record the prefix/effective segment and
       decode the next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1455
1456
1457/**
1458 * @opcode 0x2f
1459 * @opfltest af,cf
1460 * @opflmodify cf,pf,af,zf,sf,of
1461 * @opflundef of
1462 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is left undefined by DAS. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1471
1472
1473/**
1474 * @opcode 0x30
1475 * @opgroup og_gen_arith_bin
1476 * @opflmodify cf,pf,af,zf,sf,of
1477 * @opflundef af
1478 * @opflclear of,cf
1479 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 (memory/register destination; LOCK allowed on the memory form). */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1487
1488
1489/**
1490 * @opcode 0x31
1491 * @opgroup og_gen_arith_bin
1492 * @opflmodify cf,pf,af,zf,sf,of
1493 * @opflundef af
1494 * @opflclear of,cf
1495 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64 (memory/register destination; LOCK allowed on the memory form). */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1503
1504
1505/**
1506 * @opcode 0x32
1507 * @opgroup og_gen_arith_bin
1508 * @opflmodify cf,pf,af,zf,sf,of
1509 * @opflundef af
1510 * @opflclear of,cf
1511 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 (register destination form). */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1518
1519
1520/**
1521 * @opcode 0x33
1522 * @opgroup og_gen_arith_bin
1523 * @opflmodify cf,pf,af,zf,sf,of
1524 * @opflundef af
1525 * @opflclear of,cf
1526 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 (register destination form). */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1533
1534
1535/**
1536 * @opcode 0x34
1537 * @opgroup og_gen_arith_bin
1538 * @opflmodify cf,pf,af,zf,sf,of
1539 * @opflundef af
1540 * @opflclear of,cf
1541 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1548
1549
1550/**
1551 * @opcode 0x35
1552 * @opgroup og_gen_arith_bin
1553 * @opflmodify cf,pf,af,zf,sf,of
1554 * @opflundef af
1555 * @opflclear of,cf
1556 */
1557FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1558{
1559 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1560 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1561 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1562}
1563
1564
1565/**
1566 * @opcode 0x36
1567 * @opmnemonic SEG
1568 * @op1 SS
1569 * @opgroup og_prefix
1570 * @openc prefix
1571 * @opdisenum OP_SEG
1572 * @ophints harmless
1573 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record the prefix/effective segment and
       decode the next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1583
1584
1585/**
1586 * @opcode 0x37
1587 * @opfltest af,cf
1588 * @opflmodify cf,pf,af,zf,sf,of
1589 * @opflundef pf,zf,sf,of
1590 * @opgroup og_gen_arith_dec
1591 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1592 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1593 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1594 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1595 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1596 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1597 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1598 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1599 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1600 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1601 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1602 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1603 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1604 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1605 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1606 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1607 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1608 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1609 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1611 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1612 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1613 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1615 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1617 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1618 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1619 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1621 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1622 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* only OF verified undefined; the @optest table above pins the rest */

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1632
1633
1634/**
1635 * @opcode 0x38
1636 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - read-only comparison; LOCK is not allowed. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* The IEMOP_BODY_* macros expand to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1643
1644
1645/**
1646 * @opcode 0x39
1647 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 - read-only comparison. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1653
1654
1655/**
1656 * @opcode 0x3a
1657 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - read-only comparison. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1663
1664
1665/**
1666 * @opcode 0x3b
1667 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 - read-only; note the 0 'modifies dst' argument. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1673
1674
1675/**
1676 * @opcode 0x3c
1677 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 - read-only comparison. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1683
1684
1685/**
1686 * @opcode 0x3d
1687 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, immz - read-only; note the 0 'modifies dst' argument. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* The IEMOP_BODY_* macro expands to the complete decode & dispatch code, including returns. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1693
1694
1695/**
1696 * @opcode 0x3e
1697 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record the prefix/effective segment and
       decode the next opcode byte through the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1707
1708
1709/**
1710 * @opcode 0x3f
1711 * @opfltest af,cf
1712 * @opflmodify cf,pf,af,zf,sf,of
1713 * @opflundef pf,zf,sf,of
1714 * @opgroup og_gen_arith_dec
1715 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1716 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1717 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1718 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1719 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1720 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1721 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1722 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1723 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1724 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1725 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1726 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1731 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1732 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1733 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1734 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1735 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1736 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1744 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1745 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1746 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 */
1751FNIEMOP_DEF(iemOp_aas)
1752{
1753 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1754 IEMOP_HLP_NO_64BIT();
1755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1756 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1757
1758 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1759}
1760
1761
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes.
 *
 * Dispatches on the effective operand size, calls the given assembly worker
 * with a pointer to the register and to EFLAGS, and in the 32-bit case also
 * clears the high half of the 64-bit register (IEM_MC_CLEAR_HIGH_GREG_U64),
 * as usual for 32-bit operations.
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operand size.
 * @param   a_iReg          The X86_GREG_xXX index of the register to modify.
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1797
1798/**
1799 * @opcode 0x40
1800 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x40 = plain REX (no R/X/B/W bits): record it and re-dispatch. */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1818
1819
1820/**
1821 * @opcode 0x41
1822 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x41 = REX.B: extends the r/m, base or opcode register field. */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1841
1842
1843/**
1844 * @opcode 0x42
1845 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x42 = REX.X: extends the SIB index register field. */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1864
1865
1866
1867/**
1868 * @opcode 0x43
1869 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x43 = REX.BX: both the base/r-m and SIB index extensions. */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1889
1890
1891/**
1892 * @opcode 0x44
1893 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x44 = REX.R: extends the ModR/M reg field. */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1912
1913
1914/**
1915 * @opcode 0x45
1916 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x45 = REX.RB: reg and base/r-m extensions. */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1936
1937
1938/**
1939 * @opcode 0x46
1940 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x46 = REX.RX: reg and SIB index extensions. */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1960
1961
1962/**
1963 * @opcode 0x47
1964 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    /* 0x47 = REX.RXB: reg, index and base/r-m extensions. */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1985
1986
1987/**
1988 * @opcode 0x48
1989 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.W (0x48): selects 64-bit operand size, so the effective
           operand size must be recalculated. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC eAX. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2008
2009
2010/**
2011 * @opcode 0x49
2012 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.WB (0x49): 64-bit operand size + r/m/base index extension. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC eCX. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2032
2033
2034/**
2035 * @opcode 0x4a
2036 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.WX (0x4a): 64-bit operand size + SIB index extension. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC eDX. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2056
2057
2058/**
2059 * @opcode 0x4b
2060 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.WXB (0x4b): 64-bit operand size + SIB index and r/m/base extension. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC eBX. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2081
2082
2083/**
2084 * @opcode 0x4c
2085 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.WR (0x4c): 64-bit operand size + ModR/M reg index extension. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC eSP. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2105
2106
2107/**
2108 * @opcode 0x4d
2109 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.WRB (0x4d): 64-bit operand size + reg and r/m/base extension. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC eBP. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2130
2131
2132/**
2133 * @opcode 0x4e
2134 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.WRX (0x4e): 64-bit operand size + reg and SIB index extension. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC eSI. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2155
2156
2157/**
2158 * @opcode 0x4f
2159 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.WRXB (0x4f): all extension bits plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC eDI. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2181
2182
2183/**
2184 * Common 'push register' helper.
2185 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* In 64-bit mode REX.B extends the register index, the default
           operand size is 64-bit, and the 0x66 prefix selects 16-bit
           (there is no 32-bit push in 64-bit mode). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value and push it, sized by effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2230
2231
2232/**
2233 * @opcode 0x50
2234 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX - dispatch to the common push-register helper. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2240
2241
2242/**
2243 * @opcode 0x51
2244 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* PUSH rCX - dispatch to the common push-register helper. */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2250
2251
2252/**
2253 * @opcode 0x52
2254 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* PUSH rDX - dispatch to the common push-register helper. */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2260
2261
2262/**
2263 * @opcode 0x53
2264 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    /* PUSH rBX - dispatch to the common push-register helper. */
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2270
2271
2272/**
2273 * @opcode 0x54
2274 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* The 8086 pushes the already-decremented SP value; later CPUs push the
       value SP had before the push. Handle the 8086 quirk separately. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2291
2292
2293/**
2294 * @opcode 0x55
2295 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    /* PUSH rBP - dispatch to the common push-register helper. */
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2301
2302
2303/**
2304 * @opcode 0x56
2305 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    /* PUSH rSI - dispatch to the common push-register helper. */
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2311
2312
2313/**
2314 * @opcode 0x57
2315 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    /* PUSH rDI - dispatch to the common push-register helper. */
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2321
2322
2323/**
2324 * Common 'pop register' helper.
2325 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* In 64-bit mode REX.B extends the register index, the default
           operand size is 64-bit, and the 0x66 prefix selects 16-bit
           (there is no 32-bit pop in 64-bit mode). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop straight into a reference to the destination register. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64(iReg); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2371
2372
2373/**
2374 * @opcode 0x58
2375 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    /* POP rAX - dispatch to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2381
2382
2383/**
2384 * @opcode 0x59
2385 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    /* POP rCX - dispatch to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2391
2392
2393/**
2394 * @opcode 0x5a
2395 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    /* POP rDX - dispatch to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2401
2402
2403/**
2404 * @opcode 0x5b
2405 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    /* POP rBX - dispatch to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2411
2412
2413/**
2414 * @opcode 0x5c
2415 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* With REX.B this is POP r12, which has no SP special casing and can
           go through the common helper. Plain POP rSP is handled below. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* POP SP pops into a local first and stores to SP afterwards, so the
       stack read uses the pre-pop stack pointer. */
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2466
2467
2468/**
2469 * @opcode 0x5d
2470 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    /* POP rBP - dispatch to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2476
2477
2478/**
2479 * @opcode 0x5e
2480 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    /* POP rSI - dispatch to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2486
2487
2488/**
2489 * @opcode 0x5f
2490 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    /* POP rDI - dispatch to the common pop-register helper. */
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2496
2497
2498/**
2499 * @opcode 0x60
2500 */
FNIEMOP_DEF(iemOp_pusha)
{
    /* PUSHA/PUSHAD - invalid in 64-bit mode; deferred to C implementation. */
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
}
2511
2512
2513/**
2514 * @opcode 0x61
2515 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* POPA/POPAD in legacy modes; in 64-bit mode 0x61 would be the (Knights
       Corner) MVEX prefix, which is not supported and raises #UD. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2532
2533
2534/**
2535 * @opcode 0x62
2536 * @opmnemonic bound
2537 * @op1 Gv_RO
2538 * @op2 Ma
2539 * @opmincpu 80186
2540 * @ophints harmless x86_invalid_64
2541 * @optest op1=0 op2=0 ->
2542 * @optest op1=1 op2=0 -> value.xcpt=5
2543 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2544 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2545 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2546 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2547 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2548 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2549 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2550 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2551 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2555 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2564 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2565 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2567 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2568 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2569 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2570 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2571 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2572 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2573 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2577 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2584 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2585 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* BOUND proper: fetch the index register and the two bound words
               from memory, then defer the range check to the C implementation. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 in legacy mode: candidate EVEX prefix; #UD if AVX-512 absent. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes; the prefix
       itself is not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2673
2674
2675/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    /* ARPL Ew,Gw - adjust RPL of the selector in Ew to at least that of Gw.
       Protected mode only (no real/V86); invalid in 64-bit mode (0x63 is
       MOVSXD there, see iemOp_movsxd_Gv_Ev). */
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, apply, then commit. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2723
2724
2725/**
2726 * @opcode 0x63
2727 *
2728 * @note This is a weird one. It works like a regular move instruction if
2729 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2730 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the REX.W form (sign-extend 32-bit source to 64-bit) is handled;
       the non-REX.W variants hit the AssertFailedReturn below. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2772
2773
2774/**
2775 * @opcode 0x64
2776 * @opmnemonic segfs
2777 * @opmincpu 80386
2778 * @opgroup og_prefixes
2779 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it, set the effective segment, and
       continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2791
2792
2793/**
2794 * @opcode 0x65
2795 * @opmnemonic seggs
2796 * @opmincpu 80386
2797 * @opgroup og_prefixes
2798 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record it, set the effective segment, and
       continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2810
2811
2812/**
2813 * @opcode 0x66
2814 * @opmnemonic opsize
2815 * @openc prefix
2816 * @opmincpu 80386
2817 * @ophints harmless
2818 * @opgroup og_prefixes
2819 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix (0x66): flag it and recalculate the
       effective operand size, then continue decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2836
2837
2838/**
2839 * @opcode 0x67
2840 * @opmnemonic addrsize
2841 * @openc prefix
2842 * @opmincpu 80386
2843 * @ophints harmless
2844 * @opgroup og_prefixes
2845 */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix (0x67): toggles the effective address
       mode relative to the default (64-bit mode drops to 32-bit, never 16). */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2863
2864
2865/**
2866 * @opcode 0x68
2867 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    /* PUSH Iz - push an immediate sized by the effective operand size.
       The 64-bit form sign-extends a 32-bit immediate. */
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2905
2906
2907/**
2908 * @opcode 0x69
2909 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    /* IMUL Gv,Ev,Iz - three-operand signed multiply: Gv = Ev * Iz.
       SF/ZF/AF/PF are undefined after this instruction. */
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Worker selected per guest-CPU EFLAGS behavior. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local, then store to the destination reg. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand; the 32-bit immediate is sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3069
3070
3071/**
3072 * @opcode 0x6a
3073 */
3074FNIEMOP_DEF(iemOp_push_Ib)
3075{
3076 IEMOP_MNEMONIC(push_Ib, "push Ib");
3077 IEMOP_HLP_MIN_186();
3078 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3079 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3080
3081 switch (pVCpu->iem.s.enmEffOpSize)
3082 {
3083 case IEMMODE_16BIT:
3084 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
3085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3086 IEM_MC_PUSH_U16(i8Imm);
3087 IEM_MC_ADVANCE_RIP_AND_FINISH();
3088 IEM_MC_END();
3089 break;
3090 case IEMMODE_32BIT:
3091 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3093 IEM_MC_PUSH_U32(i8Imm);
3094 IEM_MC_ADVANCE_RIP_AND_FINISH();
3095 IEM_MC_END();
3096 break;
3097 case IEMMODE_64BIT:
3098 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
3099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3100 IEM_MC_PUSH_U64(i8Imm);
3101 IEM_MC_ADVANCE_RIP_AND_FINISH();
3102 IEM_MC_END();
3103 break;
3104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3105 }
3106}
3107
3108
3109/**
3110 * @opcode 0x6b
3111 */
3112FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3113{
3114 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3115 IEMOP_HLP_MIN_186();
3116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3117 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3118
3119 switch (pVCpu->iem.s.enmEffOpSize)
3120 {
3121 case IEMMODE_16BIT:
3122 {
3123 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3124 if (IEM_IS_MODRM_REG_MODE(bRm))
3125 {
3126 /* register operand */
3127 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3128 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3130 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3131 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3133 IEM_MC_LOCAL(uint16_t, u16Tmp);
3134
3135 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3136 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3137 IEM_MC_REF_EFLAGS(pEFlags);
3138 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3139 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3140
3141 IEM_MC_ADVANCE_RIP_AND_FINISH();
3142 IEM_MC_END();
3143 }
3144 else
3145 {
3146 /* memory operand */
3147 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3148
3149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3151
3152 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3154
3155 IEM_MC_LOCAL(uint16_t, u16Tmp);
3156 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3157
3158 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3159 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3160 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3161 IEM_MC_REF_EFLAGS(pEFlags);
3162 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3163 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3164
3165 IEM_MC_ADVANCE_RIP_AND_FINISH();
3166 IEM_MC_END();
3167 }
3168 break;
3169 }
3170
3171 case IEMMODE_32BIT:
3172 {
3173 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3174 if (IEM_IS_MODRM_REG_MODE(bRm))
3175 {
3176 /* register operand */
3177 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3178 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3180 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3181 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3182 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3183 IEM_MC_LOCAL(uint32_t, u32Tmp);
3184
3185 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3186 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3187 IEM_MC_REF_EFLAGS(pEFlags);
3188 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3189 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3190
3191 IEM_MC_ADVANCE_RIP_AND_FINISH();
3192 IEM_MC_END();
3193 }
3194 else
3195 {
3196 /* memory operand */
3197 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3200
3201 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3203
3204 IEM_MC_LOCAL(uint32_t, u32Tmp);
3205 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3206
3207 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3208 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3209 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3210 IEM_MC_REF_EFLAGS(pEFlags);
3211 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3212 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3213
3214 IEM_MC_ADVANCE_RIP_AND_FINISH();
3215 IEM_MC_END();
3216 }
3217 break;
3218 }
3219
3220 case IEMMODE_64BIT:
3221 {
3222 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3223 if (IEM_IS_MODRM_REG_MODE(bRm))
3224 {
3225 /* register operand */
3226 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3227 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3229 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3230 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3231 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3232 IEM_MC_LOCAL(uint64_t, u64Tmp);
3233
3234 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3235 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3236 IEM_MC_REF_EFLAGS(pEFlags);
3237 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3238 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3239
3240 IEM_MC_ADVANCE_RIP_AND_FINISH();
3241 IEM_MC_END();
3242 }
3243 else
3244 {
3245 /* memory operand */
3246 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3247 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3248 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3249
3250 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3252
3253 IEM_MC_LOCAL(uint64_t, u64Tmp);
3254 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3255
3256 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3257 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3258 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3259 IEM_MC_REF_EFLAGS(pEFlags);
3260 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3261 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3262
3263 IEM_MC_ADVANCE_RIP_AND_FINISH();
3264 IEM_MC_END();
3265 }
3266 break;
3267 }
3268
3269 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3270 }
3271}
3272
3273
3274/**
3275 * @opcode 0x6c
3276 */
3277FNIEMOP_DEF(iemOp_insb_Yb_DX)
3278{
3279 IEMOP_HLP_MIN_186();
3280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3281 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3282 {
3283 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3284 switch (pVCpu->iem.s.enmEffAddrMode)
3285 {
3286 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3287 iemCImpl_rep_ins_op8_addr16, false);
3288 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3289 iemCImpl_rep_ins_op8_addr32, false);
3290 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3291 iemCImpl_rep_ins_op8_addr64, false);
3292 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3293 }
3294 }
3295 else
3296 {
3297 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3298 switch (pVCpu->iem.s.enmEffAddrMode)
3299 {
3300 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3301 iemCImpl_ins_op8_addr16, false);
3302 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3303 iemCImpl_ins_op8_addr32, false);
3304 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3305 iemCImpl_ins_op8_addr64, false);
3306 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3307 }
3308 }
3309}
3310
3311
3312/**
3313 * @opcode 0x6d
3314 */
3315FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3316{
3317 IEMOP_HLP_MIN_186();
3318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3319 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3320 {
3321 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3322 switch (pVCpu->iem.s.enmEffOpSize)
3323 {
3324 case IEMMODE_16BIT:
3325 switch (pVCpu->iem.s.enmEffAddrMode)
3326 {
3327 case IEMMODE_16BIT:
3328 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3329 iemCImpl_rep_ins_op16_addr16, false);
3330 case IEMMODE_32BIT:
3331 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3332 iemCImpl_rep_ins_op16_addr32, false);
3333 case IEMMODE_64BIT:
3334 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3335 iemCImpl_rep_ins_op16_addr64, false);
3336 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3337 }
3338 break;
3339 case IEMMODE_64BIT:
3340 case IEMMODE_32BIT:
3341 switch (pVCpu->iem.s.enmEffAddrMode)
3342 {
3343 case IEMMODE_16BIT:
3344 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3345 iemCImpl_rep_ins_op32_addr16, false);
3346 case IEMMODE_32BIT:
3347 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3348 iemCImpl_rep_ins_op32_addr32, false);
3349 case IEMMODE_64BIT:
3350 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3351 iemCImpl_rep_ins_op32_addr64, false);
3352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3353 }
3354 break;
3355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3356 }
3357 }
3358 else
3359 {
3360 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3361 switch (pVCpu->iem.s.enmEffOpSize)
3362 {
3363 case IEMMODE_16BIT:
3364 switch (pVCpu->iem.s.enmEffAddrMode)
3365 {
3366 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3367 iemCImpl_ins_op16_addr16, false);
3368 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3369 iemCImpl_ins_op16_addr32, false);
3370 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3371 iemCImpl_ins_op16_addr64, false);
3372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3373 }
3374 break;
3375 case IEMMODE_64BIT:
3376 case IEMMODE_32BIT:
3377 switch (pVCpu->iem.s.enmEffAddrMode)
3378 {
3379 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3380 iemCImpl_ins_op32_addr16, false);
3381 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3382 iemCImpl_ins_op32_addr32, false);
3383 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3384 iemCImpl_ins_op32_addr64, false);
3385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3386 }
3387 break;
3388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3389 }
3390 }
3391}
3392
3393
3394/**
3395 * @opcode 0x6e
3396 */
3397FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3398{
3399 IEMOP_HLP_MIN_186();
3400 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3401 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3402 {
3403 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3404 switch (pVCpu->iem.s.enmEffAddrMode)
3405 {
3406 case IEMMODE_16BIT:
3407 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3408 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3409 case IEMMODE_32BIT:
3410 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3411 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3412 case IEMMODE_64BIT:
3413 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3414 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3416 }
3417 }
3418 else
3419 {
3420 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3421 switch (pVCpu->iem.s.enmEffAddrMode)
3422 {
3423 case IEMMODE_16BIT:
3424 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3425 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3426 case IEMMODE_32BIT:
3427 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3428 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3429 case IEMMODE_64BIT:
3430 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3431 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3433 }
3434 }
3435}
3436
3437
3438/**
3439 * @opcode 0x6f
3440 */
3441FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3442{
3443 IEMOP_HLP_MIN_186();
3444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3445 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3446 {
3447 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3448 switch (pVCpu->iem.s.enmEffOpSize)
3449 {
3450 case IEMMODE_16BIT:
3451 switch (pVCpu->iem.s.enmEffAddrMode)
3452 {
3453 case IEMMODE_16BIT:
3454 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3455 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3456 case IEMMODE_32BIT:
3457 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3458 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3459 case IEMMODE_64BIT:
3460 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3461 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3463 }
3464 break;
3465 case IEMMODE_64BIT:
3466 case IEMMODE_32BIT:
3467 switch (pVCpu->iem.s.enmEffAddrMode)
3468 {
3469 case IEMMODE_16BIT:
3470 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3471 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3472 case IEMMODE_32BIT:
3473 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3474 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3475 case IEMMODE_64BIT:
3476 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3477 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3479 }
3480 break;
3481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3482 }
3483 }
3484 else
3485 {
3486 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3487 switch (pVCpu->iem.s.enmEffOpSize)
3488 {
3489 case IEMMODE_16BIT:
3490 switch (pVCpu->iem.s.enmEffAddrMode)
3491 {
3492 case IEMMODE_16BIT:
3493 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3494 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3495 case IEMMODE_32BIT:
3496 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3497 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3498 case IEMMODE_64BIT:
3499 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3500 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3502 }
3503 break;
3504 case IEMMODE_64BIT:
3505 case IEMMODE_32BIT:
3506 switch (pVCpu->iem.s.enmEffAddrMode)
3507 {
3508 case IEMMODE_16BIT:
3509 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3510 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3511 case IEMMODE_32BIT:
3512 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3513 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3514 case IEMMODE_64BIT:
3515 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3516 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3517 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3518 }
3519 break;
3520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3521 }
3522 }
3523}
3524
3525
3526/**
3527 * @opcode 0x70
3528 */
3529FNIEMOP_DEF(iemOp_jo_Jb)
3530{
3531 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3532 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3533 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3534
3535 IEM_MC_BEGIN(0, 0, 0, 0);
3536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3537 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3538 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3539 } IEM_MC_ELSE() {
3540 IEM_MC_ADVANCE_RIP_AND_FINISH();
3541 } IEM_MC_ENDIF();
3542 IEM_MC_END();
3543}
3544
3545
3546/**
3547 * @opcode 0x71
3548 */
3549FNIEMOP_DEF(iemOp_jno_Jb)
3550{
3551 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3552 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3553 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3554
3555 IEM_MC_BEGIN(0, 0, 0, 0);
3556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3557 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3558 IEM_MC_ADVANCE_RIP_AND_FINISH();
3559 } IEM_MC_ELSE() {
3560 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3561 } IEM_MC_ENDIF();
3562 IEM_MC_END();
3563}
3564
3565/**
3566 * @opcode 0x72
3567 */
3568FNIEMOP_DEF(iemOp_jc_Jb)
3569{
3570 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3571 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3572 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3573
3574 IEM_MC_BEGIN(0, 0, 0, 0);
3575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3576 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3577 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3578 } IEM_MC_ELSE() {
3579 IEM_MC_ADVANCE_RIP_AND_FINISH();
3580 } IEM_MC_ENDIF();
3581 IEM_MC_END();
3582}
3583
3584
3585/**
3586 * @opcode 0x73
3587 */
3588FNIEMOP_DEF(iemOp_jnc_Jb)
3589{
3590 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3591 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3592 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3593
3594 IEM_MC_BEGIN(0, 0, 0, 0);
3595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3596 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3597 IEM_MC_ADVANCE_RIP_AND_FINISH();
3598 } IEM_MC_ELSE() {
3599 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3600 } IEM_MC_ENDIF();
3601 IEM_MC_END();
3602}
3603
3604
3605/**
3606 * @opcode 0x74
3607 */
3608FNIEMOP_DEF(iemOp_je_Jb)
3609{
3610 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3611 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3612 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3613
3614 IEM_MC_BEGIN(0, 0, 0, 0);
3615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3616 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3617 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3618 } IEM_MC_ELSE() {
3619 IEM_MC_ADVANCE_RIP_AND_FINISH();
3620 } IEM_MC_ENDIF();
3621 IEM_MC_END();
3622}
3623
3624
3625/**
3626 * @opcode 0x75
3627 */
3628FNIEMOP_DEF(iemOp_jne_Jb)
3629{
3630 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3631 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3632 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3633
3634 IEM_MC_BEGIN(0, 0, 0, 0);
3635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3636 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3637 IEM_MC_ADVANCE_RIP_AND_FINISH();
3638 } IEM_MC_ELSE() {
3639 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3640 } IEM_MC_ENDIF();
3641 IEM_MC_END();
3642}
3643
3644
3645/**
3646 * @opcode 0x76
3647 */
3648FNIEMOP_DEF(iemOp_jbe_Jb)
3649{
3650 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3651 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3652 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3653
3654 IEM_MC_BEGIN(0, 0, 0, 0);
3655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3656 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3657 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3658 } IEM_MC_ELSE() {
3659 IEM_MC_ADVANCE_RIP_AND_FINISH();
3660 } IEM_MC_ENDIF();
3661 IEM_MC_END();
3662}
3663
3664
3665/**
3666 * @opcode 0x77
3667 */
3668FNIEMOP_DEF(iemOp_jnbe_Jb)
3669{
3670 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3671 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3672 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3673
3674 IEM_MC_BEGIN(0, 0, 0, 0);
3675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3676 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3677 IEM_MC_ADVANCE_RIP_AND_FINISH();
3678 } IEM_MC_ELSE() {
3679 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3680 } IEM_MC_ENDIF();
3681 IEM_MC_END();
3682}
3683
3684
3685/**
3686 * @opcode 0x78
3687 */
3688FNIEMOP_DEF(iemOp_js_Jb)
3689{
3690 IEMOP_MNEMONIC(js_Jb, "js Jb");
3691 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3692 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3693
3694 IEM_MC_BEGIN(0, 0, 0, 0);
3695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3696 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3697 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3698 } IEM_MC_ELSE() {
3699 IEM_MC_ADVANCE_RIP_AND_FINISH();
3700 } IEM_MC_ENDIF();
3701 IEM_MC_END();
3702}
3703
3704
3705/**
3706 * @opcode 0x79
3707 */
3708FNIEMOP_DEF(iemOp_jns_Jb)
3709{
3710 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3711 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3712 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3713
3714 IEM_MC_BEGIN(0, 0, 0, 0);
3715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3716 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3717 IEM_MC_ADVANCE_RIP_AND_FINISH();
3718 } IEM_MC_ELSE() {
3719 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3720 } IEM_MC_ENDIF();
3721 IEM_MC_END();
3722}
3723
3724
3725/**
3726 * @opcode 0x7a
3727 */
3728FNIEMOP_DEF(iemOp_jp_Jb)
3729{
3730 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3731 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3732 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3733
3734 IEM_MC_BEGIN(0, 0, 0, 0);
3735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3736 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3737 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3738 } IEM_MC_ELSE() {
3739 IEM_MC_ADVANCE_RIP_AND_FINISH();
3740 } IEM_MC_ENDIF();
3741 IEM_MC_END();
3742}
3743
3744
3745/**
3746 * @opcode 0x7b
3747 */
3748FNIEMOP_DEF(iemOp_jnp_Jb)
3749{
3750 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3751 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3752 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3753
3754 IEM_MC_BEGIN(0, 0, 0, 0);
3755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3756 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3757 IEM_MC_ADVANCE_RIP_AND_FINISH();
3758 } IEM_MC_ELSE() {
3759 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3760 } IEM_MC_ENDIF();
3761 IEM_MC_END();
3762}
3763
3764
3765/**
3766 * @opcode 0x7c
3767 */
3768FNIEMOP_DEF(iemOp_jl_Jb)
3769{
3770 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3771 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3772 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3773
3774 IEM_MC_BEGIN(0, 0, 0, 0);
3775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3777 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3778 } IEM_MC_ELSE() {
3779 IEM_MC_ADVANCE_RIP_AND_FINISH();
3780 } IEM_MC_ENDIF();
3781 IEM_MC_END();
3782}
3783
3784
3785/**
3786 * @opcode 0x7d
3787 */
3788FNIEMOP_DEF(iemOp_jnl_Jb)
3789{
3790 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3791 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3792 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3793
3794 IEM_MC_BEGIN(0, 0, 0, 0);
3795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3796 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3797 IEM_MC_ADVANCE_RIP_AND_FINISH();
3798 } IEM_MC_ELSE() {
3799 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3800 } IEM_MC_ENDIF();
3801 IEM_MC_END();
3802}
3803
3804
3805/**
3806 * @opcode 0x7e
3807 */
3808FNIEMOP_DEF(iemOp_jle_Jb)
3809{
3810 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3811 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3812 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3813
3814 IEM_MC_BEGIN(0, 0, 0, 0);
3815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3816 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3817 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3818 } IEM_MC_ELSE() {
3819 IEM_MC_ADVANCE_RIP_AND_FINISH();
3820 } IEM_MC_ENDIF();
3821 IEM_MC_END();
3822}
3823
3824
3825/**
3826 * @opcode 0x7f
3827 */
3828FNIEMOP_DEF(iemOp_jnle_Jb)
3829{
3830 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3831 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3832 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3833
3834 IEM_MC_BEGIN(0, 0, 0, 0);
3835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3836 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3837 IEM_MC_ADVANCE_RIP_AND_FINISH();
3838 } IEM_MC_ELSE() {
3839 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3840 } IEM_MC_ENDIF();
3841 IEM_MC_END();
3842}
3843
3844
3845/**
3846 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3847 * iemOp_Grp1_Eb_Ib_80.
3848 */
3849#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3850 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3851 { \
3852 /* register target */ \
3853 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3854 IEM_MC_BEGIN(3, 0, 0, 0); \
3855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3856 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3857 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3858 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3859 \
3860 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3861 IEM_MC_REF_EFLAGS(pEFlags); \
3862 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3863 \
3864 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3865 IEM_MC_END(); \
3866 } \
3867 else \
3868 { \
3869 /* memory target */ \
3870 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3871 { \
3872 IEM_MC_BEGIN(3, 3, 0, 0); \
3873 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3874 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3875 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3876 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3877 \
3878 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3879 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3880 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3881 IEMOP_HLP_DONE_DECODING(); \
3882 \
3883 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3884 IEM_MC_FETCH_EFLAGS(EFlags); \
3885 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3886 \
3887 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3888 IEM_MC_COMMIT_EFLAGS(EFlags); \
3889 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3890 IEM_MC_END(); \
3891 } \
3892 else \
3893 { \
3894 (void)0
3895
/**
 * Body for the LOCK-prefixed memory variant of group 1 byte-imm binary ops;
 * completes the braces left open by IEMOP_BODY_BINARY_Eb_Ib_RW.  Identical
 * shape to the un-LOCKed memory path except it invokes the locked worker.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3919
/**
 * Body for group 1 byte-imm binary ops that only read the destination (CMP):
 * the memory operand is mapped read-only (uint8_t const *).  Like the RW
 * variant, this ends inside an open 'else {' block for the LOCK-prefixed
 * case and must be completed by IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3966
/**
 * Completes IEMOP_BODY_BINARY_Eb_Ib_RW/RO for instructions where the LOCK
 * prefix is invalid: raises \#UD after finishing the decode.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3973
3974
3975
3976/**
3977 * @opmaps grp1_80,grp1_83
3978 * @opcode /0
3979 */
3980FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
3981{
3982 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
3983 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
3984 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
3985}
3986
3987
3988/**
3989 * @opmaps grp1_80,grp1_83
3990 * @opcode /1
3991 */
3992FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
3993{
3994 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
3995 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
3996 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
3997}
3998
3999
4000/**
4001 * @opmaps grp1_80,grp1_83
4002 * @opcode /2
4003 */
4004FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4005{
4006 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4007 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4008 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4009}
4010
4011
4012/**
4013 * @opmaps grp1_80,grp1_83
4014 * @opcode /3
4015 */
4016FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4017{
4018 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4019 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4020 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4021}
4022
4023
4024/**
4025 * @opmaps grp1_80,grp1_83
4026 * @opcode /4
4027 */
4028FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4029{
4030 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4031 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4032 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4033}
4034
4035
4036/**
4037 * @opmaps grp1_80,grp1_83
4038 * @opcode /5
4039 */
4040FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4041{
4042 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4043 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4044 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4045}
4046
4047
4048/**
4049 * @opmaps grp1_80,grp1_83
4050 * @opcode /6
4051 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    /* _RW covers register + non-LOCKed memory targets; _LOCKED completes its
       open 'else' scope for LOCK-prefixed memory targets. */
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4058
4059
4060/**
4061 * @opmaps grp1_80,grp1_83
4062 * @opcode /7
4063 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    /* CMP only reads its destination, so the read-only body is used and the
       _NO_LOCK closer rejects any LOCK prefix. */
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4070
4071
4072/**
4073 * @opcode 0x80
4074 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /* Group 1 Eb,Ib: the ModR/M reg field selects which of the eight ALU
       operations to dispatch to. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4091
4092
4093/**
4094 * Body for a group 1 binary operator.
4095 */
4096#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4097 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4098 { \
4099 /* register target */ \
4100 switch (pVCpu->iem.s.enmEffOpSize) \
4101 { \
4102 case IEMMODE_16BIT: \
4103 { \
4104 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4105 IEM_MC_BEGIN(3, 0, 0, 0); \
4106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4107 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4108 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4109 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4110 \
4111 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4112 IEM_MC_REF_EFLAGS(pEFlags); \
4113 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4114 \
4115 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4116 IEM_MC_END(); \
4117 break; \
4118 } \
4119 \
4120 case IEMMODE_32BIT: \
4121 { \
4122 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4123 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4125 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4126 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4127 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4128 \
4129 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4130 IEM_MC_REF_EFLAGS(pEFlags); \
4131 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4132 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4133 \
4134 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4135 IEM_MC_END(); \
4136 break; \
4137 } \
4138 \
4139 case IEMMODE_64BIT: \
4140 { \
4141 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4142 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4144 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4145 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4146 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4147 \
4148 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4149 IEM_MC_REF_EFLAGS(pEFlags); \
4150 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4151 \
4152 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4153 IEM_MC_END(); \
4154 break; \
4155 } \
4156 \
4157 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4158 } \
4159 } \
4160 else \
4161 { \
4162 /* memory target */ \
4163 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4164 { \
4165 switch (pVCpu->iem.s.enmEffOpSize) \
4166 { \
4167 case IEMMODE_16BIT: \
4168 { \
4169 IEM_MC_BEGIN(3, 3, 0, 0); \
4170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4172 \
4173 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4174 IEMOP_HLP_DONE_DECODING(); \
4175 \
4176 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4177 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4178 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4179 \
4180 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4181 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4182 IEM_MC_FETCH_EFLAGS(EFlags); \
4183 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4184 \
4185 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4186 IEM_MC_COMMIT_EFLAGS(EFlags); \
4187 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4188 IEM_MC_END(); \
4189 break; \
4190 } \
4191 \
4192 case IEMMODE_32BIT: \
4193 { \
4194 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4196 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4197 \
4198 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4199 IEMOP_HLP_DONE_DECODING(); \
4200 \
4201 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4202 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4203 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4204 \
4205 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4206 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4207 IEM_MC_FETCH_EFLAGS(EFlags); \
4208 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4209 \
4210 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4211 IEM_MC_COMMIT_EFLAGS(EFlags); \
4212 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4213 IEM_MC_END(); \
4214 break; \
4215 } \
4216 \
4217 case IEMMODE_64BIT: \
4218 { \
4219 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4220 \
4221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4223 \
4224 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4225 IEMOP_HLP_DONE_DECODING(); \
4226 \
4227 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4228 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4229 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4230 \
4231 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4232 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4233 IEM_MC_FETCH_EFLAGS(EFlags); \
4234 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4235 \
4236 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4237 IEM_MC_COMMIT_EFLAGS(EFlags); \
4238 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4239 IEM_MC_END(); \
4240 break; \
4241 } \
4242 \
4243 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4244 } \
4245 } \
4246 else \
4247 { \
4248 (void)0
/*
 * This must be a separate macro due to parsing restrictions in
 * IEMAllInstPython.py.  It completes the open 'else' scope left by
 * IEMOP_BODY_BINARY_Ev_Iz_RW, handling LOCK-prefixed memory targets via the
 * atomic (locked) worker functions.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,           0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src,  u16Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,           0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src,  u32Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,           0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src,  u64Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4333
/*
 * Read-only version (for CMP and the like): the destination is only read,
 * so register writes are skipped and memory targets are mapped read-only.
 * Unlike the _RW variant this macro is self-contained: it rejects the LOCK
 * prefix itself and closes all scopes, so no closer macro is needed.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                 0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,    1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                 0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,    1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                 0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,    1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,          0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src,  u16Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,          0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src,  u32Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,          0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src,  u64Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4490
4491
4492/**
4493 * @opmaps grp1_81
4494 * @opcode /0
4495 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* _RW handles register + non-LOCKed memory targets; _LOCKED completes its
       open 'else' scope for LOCK-prefixed memory targets. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4502
4503
4504/**
4505 * @opmaps grp1_81
4506 * @opcode /1
4507 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* _RW handles register + non-LOCKed memory targets; _LOCKED completes its
       open 'else' scope for LOCK-prefixed memory targets. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4514
4515
4516/**
4517 * @opmaps grp1_81
4518 * @opcode /2
4519 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* _RW handles register + non-LOCKed memory targets; _LOCKED completes its
       open 'else' scope for LOCK-prefixed memory targets. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4526
4527
4528/**
4529 * @opmaps grp1_81
4530 * @opcode /3
4531 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* _RW handles register + non-LOCKed memory targets; _LOCKED completes its
       open 'else' scope for LOCK-prefixed memory targets. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4538
4539
4540/**
4541 * @opmaps grp1_81
4542 * @opcode /4
4543 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* _RW handles register + non-LOCKed memory targets; _LOCKED completes its
       open 'else' scope for LOCK-prefixed memory targets. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4550
4551
4552/**
4553 * @opmaps grp1_81
4554 * @opcode /5
4555 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* _RW handles register + non-LOCKed memory targets; _LOCKED completes its
       open 'else' scope for LOCK-prefixed memory targets. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4562
4563
4564/**
4565 * @opmaps grp1_81
4566 * @opcode /6
4567 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* _RW handles register + non-LOCKed memory targets; _LOCKED completes its
       open 'else' scope for LOCK-prefixed memory targets. */
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4574
4575
4576/**
4577 * @opmaps grp1_81
4578 * @opcode /7
4579 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* CMP only reads the destination; the read-only body also rejects the
       LOCK prefix itself, so no separate closer macro is needed. */
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4585
4586
4587/**
4588 * @opcode 0x81
4589 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1 Ev,Iz: the ModR/M reg field selects which of the eight ALU
       operations to dispatch to. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4606
4607
4608/**
4609 * @opcode 0x82
4610 * @opmnemonic grp1_82
4611 * @opgroup og_groups
4612 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is handled as an alias of 0x80 after ruling out 64-bit
       mode, where it is not valid. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4618
4619
4620/**
4621 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4622 * iemOp_Grp1_Ev_Ib.
4623 */
4624#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4625 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4626 { \
4627 /* \
4628 * Register target \
4629 */ \
4630 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4631 switch (pVCpu->iem.s.enmEffOpSize) \
4632 { \
4633 case IEMMODE_16BIT: \
4634 IEM_MC_BEGIN(3, 0, 0, 0); \
4635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4636 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4637 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4638 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4639 \
4640 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4641 IEM_MC_REF_EFLAGS(pEFlags); \
4642 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4643 \
4644 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4645 IEM_MC_END(); \
4646 break; \
4647 \
4648 case IEMMODE_32BIT: \
4649 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4650 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4651 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4652 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4653 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4654 \
4655 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4656 IEM_MC_REF_EFLAGS(pEFlags); \
4657 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4658 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4659 \
4660 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4661 IEM_MC_END(); \
4662 break; \
4663 \
4664 case IEMMODE_64BIT: \
4665 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4667 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4668 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4669 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4670 \
4671 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4672 IEM_MC_REF_EFLAGS(pEFlags); \
4673 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4674 \
4675 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4676 IEM_MC_END(); \
4677 break; \
4678 \
4679 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4680 } \
4681 } \
4682 else \
4683 { \
4684 /* \
4685 * Memory target. \
4686 */ \
4687 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4688 { \
4689 switch (pVCpu->iem.s.enmEffOpSize) \
4690 { \
4691 case IEMMODE_16BIT: \
4692 IEM_MC_BEGIN(3, 3, 0, 0); \
4693 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4694 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4695 \
4696 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4697 IEMOP_HLP_DONE_DECODING(); \
4698 \
4699 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4700 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4701 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4702 \
4703 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4704 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4705 IEM_MC_FETCH_EFLAGS(EFlags); \
4706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4707 \
4708 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4709 IEM_MC_COMMIT_EFLAGS(EFlags); \
4710 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4711 IEM_MC_END(); \
4712 break; \
4713 \
4714 case IEMMODE_32BIT: \
4715 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4718 \
4719 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4720 IEMOP_HLP_DONE_DECODING(); \
4721 \
4722 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4723 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4724 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4725 \
4726 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4727 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4728 IEM_MC_FETCH_EFLAGS(EFlags); \
4729 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4730 \
4731 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4732 IEM_MC_COMMIT_EFLAGS(EFlags); \
4733 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4734 IEM_MC_END(); \
4735 break; \
4736 \
4737 case IEMMODE_64BIT: \
4738 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4741 \
4742 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4743 IEMOP_HLP_DONE_DECODING(); \
4744 \
4745 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4746 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4747 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4748 \
4749 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4750 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4751 IEM_MC_FETCH_EFLAGS(EFlags); \
4752 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4753 \
4754 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4755 IEM_MC_COMMIT_EFLAGS(EFlags); \
4756 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4757 IEM_MC_END(); \
4758 break; \
4759 \
4760 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4761 } \
4762 } \
4763 else \
4764 { \
4765 (void)0
/*
 * Separate macro to work around parsing issue in IEMAllInstPython.py.
 * Completes the open 'else' scope left by IEMOP_BODY_BINARY_Ev_Ib_RW,
 * handling LOCK-prefixed memory targets via the atomic (locked) workers.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                          0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, (int16_t)(int8_t)u8Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                  2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                          0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, (int32_t)(int8_t)u8Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                  2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                          0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, (int64_t)(int8_t)u8Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                  2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4844
/*
 * Read-only variant (for CMP and the like): the destination is only read,
 * so register writes are skipped and memory targets are mapped read-only.
 * Self-contained: rejects the LOCK prefix itself and closes all scopes, so
 * no closer macro is needed.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,                         0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, (int16_t)(int8_t)u8Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                  2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,                         0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, (int32_t)(int8_t)u8Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                  2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,                         0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, (int64_t)(int8_t)u8Imm,   1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,                  2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4991
4992/**
4993 * @opmaps grp1_83
4994 * @opcode /0
4995 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* _RW handles register + non-LOCKed memory targets; _LOCKED completes its
       open 'else' scope for LOCK-prefixed memory targets. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5002
5003
5004/**
5005 * @opmaps grp1_83
5006 * @opcode /1
5007 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* _RW handles register + non-LOCKed memory targets; _LOCKED completes its
       open 'else' scope for LOCK-prefixed memory targets. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5014
5015
5016/**
5017 * @opmaps grp1_83
5018 * @opcode /2
5019 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    /* Group 1 /2 with sign-extended Ib: ADC Ev,Ib. */
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Normal (non-LOCK) encoding: 16/32/64-bit worker per effective operand size. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    /* LOCK-prefixed memory form, using the atomic worker variants. */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5026
5027
5028/**
5029 * @opmaps grp1_83
5030 * @opcode /3
5031 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    /* Group 1 /3 with sign-extended Ib: SBB Ev,Ib. */
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Normal (non-LOCK) encoding: 16/32/64-bit worker per effective operand size. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    /* LOCK-prefixed memory form, using the atomic worker variants. */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5038
5039
5040/**
5041 * @opmaps grp1_83
5042 * @opcode /4
5043 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    /* Group 1 /4 with sign-extended Ib: AND Ev,Ib. */
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Normal (non-LOCK) encoding: 16/32/64-bit worker per effective operand size. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    /* LOCK-prefixed memory form, using the atomic worker variants. */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5050
5051
5052/**
5053 * @opmaps grp1_83
5054 * @opcode /5
5055 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    /* Group 1 /5 with sign-extended Ib: SUB Ev,Ib. */
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* Normal (non-LOCK) encoding: 16/32/64-bit worker per effective operand size. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    /* LOCK-prefixed memory form, using the atomic worker variants. */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5062
5063
5064/**
5065 * @opmaps grp1_83
5066 * @opcode /6
5067 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    /* Group 1 /6 with sign-extended Ib: XOR Ev,Ib. */
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* Normal (non-LOCK) encoding: 16/32/64-bit worker per effective operand size. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    /* LOCK-prefixed memory form, using the atomic worker variants. */
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5074
5075
5076/**
5077 * @opmaps grp1_83
5078 * @opcode /7
5079 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    /* Group 1 /7 with sign-extended Ib: CMP Ev,Ib.
       CMP only reads its destination, so the read-only body is used and
       there is no LOCK-prefixed variant. */
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5085
5086
5087/**
5088 * @opcode 0x83
5089 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Opcode 0x83 (group 1, Ev,Ib): dispatch on the ModR/M reg field. */
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg is 3 bits, so 0..7 is exhaustive */
    }
}
5109
5110
5111/**
5112 * @opcode 0x84
5113 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb: AND without writing the destination (flags only). */
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is left undefined by TEST; tell the verification harness. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Read-only destination body; the NO_LOCK body rejects a LOCK prefix. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5121
5122
5123/**
5124 * @opcode 0x85
5125 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv: AND without writing the destination (flags only). */
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is left undefined by TEST; tell the verification harness. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Read-only destination body with 16/32/64-bit workers. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5132
5133
5134/**
5135 * @opcode 0x86
5136 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: swap via two temporaries. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(2, 4, 0, 0);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);
        IEM_MC_LOCAL(uint8_t,  uTmpReg);
        IEM_MC_ARG(uint8_t *,  pu8Mem,           0);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG with a memory operand uses the locked worker by default;
           the unlocked one is only taken when locking is disregarded. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5187
5188
5189/**
5190 * @opcode 0x87
5191 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: swap via two temporaries, per operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Map the destination R/W, swap with the register temporary via the
           locked worker (unless locking is disregarded), then write the old
           memory value back to the register after unmapping. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 4, 0, 0);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
                IEM_MC_LOCAL(uint16_t,  uTmpReg);
                IEM_MC_ARG(uint16_t *,  pu16Mem,           0);
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
                IEM_MC_LOCAL(uint32_t,  uTmpReg);
                IEM_MC_ARG(uint32_t *,  pu32Mem,           0);
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
                IEM_MC_LOCAL(uint64_t,  uTmpReg);
                IEM_MC_ARG(uint64_t *,  pu64Mem,           0);
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5332
5333
5334/**
5335 * @opcode 0x88
5336 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register: copy reg -> rm. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5373
5374
5375/**
5376 * @opcode 0x89
5377 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register: copy reg -> rm, per effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5471
5472
5473/**
5474 * @opcode 0x8a
5475 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register: copy rm -> reg. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5511
5512
5513/**
5514 * @opcode 0x8b
5515 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register: copy rm -> reg, per effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5609
5610
5611/**
5612 * opcode 0x63
5613 * @todo Table fixme
5614 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    /* Opcode 0x63 is mode dependent: ARPL outside 64-bit code, otherwise
       MOVSXD (or a plain MOV when the effective operand size isn't 64-bit). */
    if (!IEM_IS_64BIT_CODE(pVCpu))
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
5623
5624
5625/**
5626 * @opcode 0x8c
5627 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                /* Zero-extend the selector into the full 32-bit register. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                /* Zero-extend the selector into the full 64-bit register. */
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5703
5704
5705
5706
5707/**
5708 * @opcode 0x8d
5709 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    /* LEA stores the effective address itself, truncated to the effective
       operand size; no memory access is performed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* 64-bit: the address is stored as-is, no truncation needed. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5756
5757
5758/**
5759 * @opcode 0x8e
5760 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Loading a segment register from a GPR goes through the
           iemCImpl_load_SReg C implementation; the shadow copy of the
           segment register is flushed first. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_SREG(iSegReg); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* Loading SS inhibits interrupts for one instruction; in 32-bit code
           SS/DS/ES loads can also change the effective mode flags. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_SREG(iSegReg); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* Same flag selection as for the register form above. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5865
5866
5867/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    /* The 'N << 8' argument to IEM_MC_CALC_RM_EFF_ADDR passes the pop size
       (2/4/8 bytes) so the EA calculation can account for the rSP increment
       mandated by Intel - presumably an rSP bias; confirm against the
       IEM_MC_CALC_RM_EFF_ADDR implementation.  The actual pop and store are
       done by the iemCImpl_pop_memNN C implementations. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6001
6002
6003/**
6004 * @opcode 0x8f
6005 */
6006FNIEMOP_DEF(iemOp_Grp1A__xop)
6007{
6008 /*
6009 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6010 * three byte VEX prefix, except that the mmmmm field cannot have the values
6011 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6012 */
6013 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6014 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6015 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6016
6017 IEMOP_MNEMONIC(xop, "xop");
6018 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6019 {
6020 /** @todo Test when exctly the XOP conformance checks kick in during
6021 * instruction decoding and fetching (using \#PF). */
6022 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6023 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6024 if ( ( pVCpu->iem.s.fPrefixes
6025 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6026 == 0)
6027 {
6028 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6029 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6030 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6031 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6032 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6033 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6034 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6035 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6036 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6037
6038 /** @todo XOP: Just use new tables and decoders. */
6039 switch (bRm & 0x1f)
6040 {
6041 case 8: /* xop opcode map 8. */
6042 IEMOP_BITCH_ABOUT_STUB();
6043 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6044
6045 case 9: /* xop opcode map 9. */
6046 IEMOP_BITCH_ABOUT_STUB();
6047 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6048
6049 case 10: /* xop opcode map 10. */
6050 IEMOP_BITCH_ABOUT_STUB();
6051 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6052
6053 default:
6054 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6055 IEMOP_RAISE_INVALID_OPCODE_RET();
6056 }
6057 }
6058 else
6059 Log(("XOP: Invalid prefix mix!\n"));
6060 }
6061 else
6062 Log(("XOP: XOP support disabled!\n"));
6063 IEMOP_RAISE_INVALID_OPCODE_RET();
6064}
6065
6066
6067/**
6068 * Common 'xchg reg,rAX' helper.
6069 */
6070FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6071{
6072 iReg |= pVCpu->iem.s.uRexB;
6073 switch (pVCpu->iem.s.enmEffOpSize)
6074 {
6075 case IEMMODE_16BIT:
6076 IEM_MC_BEGIN(0, 2, 0, 0);
6077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6078 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6079 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6080 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6081 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6082 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6083 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6084 IEM_MC_ADVANCE_RIP_AND_FINISH();
6085 IEM_MC_END();
6086 break;
6087
6088 case IEMMODE_32BIT:
6089 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6091 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6092 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6093 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6094 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6095 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6096 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6097 IEM_MC_ADVANCE_RIP_AND_FINISH();
6098 IEM_MC_END();
6099 break;
6100
6101 case IEMMODE_64BIT:
6102 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6103 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6104 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6105 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6106 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6107 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6108 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6109 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6110 IEM_MC_ADVANCE_RIP_AND_FINISH();
6111 IEM_MC_END();
6112 break;
6113
6114 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6115 }
6116}
6117
6118
6119/**
6120 * @opcode 0x90
6121 */
6122FNIEMOP_DEF(iemOp_nop)
6123{
6124 /* R8/R8D and RAX/EAX can be exchanged. */
6125 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6126 {
6127 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6128 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6129 }
6130
6131 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6132 {
6133 IEMOP_MNEMONIC(pause, "pause");
6134 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6135 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6136 if (!IEM_IS_IN_GUEST(pVCpu))
6137 { /* probable */ }
6138#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6139 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6140 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6141#endif
6142#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6143 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6144 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6145#endif
6146 }
6147 else
6148 IEMOP_MNEMONIC(nop, "nop");
6149 /** @todo testcase: lock nop; lock pause */
6150 IEM_MC_BEGIN(0, 0, 0, 0);
6151 IEMOP_HLP_DONE_DECODING();
6152 IEM_MC_ADVANCE_RIP_AND_FINISH();
6153 IEM_MC_END();
6154}
6155
6156
6157/**
6158 * @opcode 0x91
6159 */
6160FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6161{
6162 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6163 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6164}
6165
6166
6167/**
6168 * @opcode 0x92
6169 */
6170FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6171{
6172 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6173 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6174}
6175
6176
6177/**
6178 * @opcode 0x93
6179 */
6180FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6181{
6182 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6183 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6184}
6185
6186
6187/**
6188 * @opcode 0x94
6189 */
6190FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6191{
6192 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6193 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6194}
6195
6196
6197/**
6198 * @opcode 0x95
6199 */
6200FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6201{
6202 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6203 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6204}
6205
6206
6207/**
6208 * @opcode 0x96
6209 */
6210FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6211{
6212 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6213 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6214}
6215
6216
6217/**
6218 * @opcode 0x97
6219 */
6220FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6221{
6222 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6223 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6224}
6225
6226
6227/**
6228 * @opcode 0x98
6229 */
6230FNIEMOP_DEF(iemOp_cbw)
6231{
6232 switch (pVCpu->iem.s.enmEffOpSize)
6233 {
6234 case IEMMODE_16BIT:
6235 IEMOP_MNEMONIC(cbw, "cbw");
6236 IEM_MC_BEGIN(0, 1, 0, 0);
6237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6238 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6239 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6240 } IEM_MC_ELSE() {
6241 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6242 } IEM_MC_ENDIF();
6243 IEM_MC_ADVANCE_RIP_AND_FINISH();
6244 IEM_MC_END();
6245 break;
6246
6247 case IEMMODE_32BIT:
6248 IEMOP_MNEMONIC(cwde, "cwde");
6249 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6251 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6252 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6253 } IEM_MC_ELSE() {
6254 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6255 } IEM_MC_ENDIF();
6256 IEM_MC_ADVANCE_RIP_AND_FINISH();
6257 IEM_MC_END();
6258 break;
6259
6260 case IEMMODE_64BIT:
6261 IEMOP_MNEMONIC(cdqe, "cdqe");
6262 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6264 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6265 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6266 } IEM_MC_ELSE() {
6267 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6268 } IEM_MC_ENDIF();
6269 IEM_MC_ADVANCE_RIP_AND_FINISH();
6270 IEM_MC_END();
6271 break;
6272
6273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6274 }
6275}
6276
6277
6278/**
6279 * @opcode 0x99
6280 */
6281FNIEMOP_DEF(iemOp_cwd)
6282{
6283 switch (pVCpu->iem.s.enmEffOpSize)
6284 {
6285 case IEMMODE_16BIT:
6286 IEMOP_MNEMONIC(cwd, "cwd");
6287 IEM_MC_BEGIN(0, 1, 0, 0);
6288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6289 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6290 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6291 } IEM_MC_ELSE() {
6292 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6293 } IEM_MC_ENDIF();
6294 IEM_MC_ADVANCE_RIP_AND_FINISH();
6295 IEM_MC_END();
6296 break;
6297
6298 case IEMMODE_32BIT:
6299 IEMOP_MNEMONIC(cdq, "cdq");
6300 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6302 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6303 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6304 } IEM_MC_ELSE() {
6305 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6306 } IEM_MC_ENDIF();
6307 IEM_MC_ADVANCE_RIP_AND_FINISH();
6308 IEM_MC_END();
6309 break;
6310
6311 case IEMMODE_64BIT:
6312 IEMOP_MNEMONIC(cqo, "cqo");
6313 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6314 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6315 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6316 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6317 } IEM_MC_ELSE() {
6318 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6319 } IEM_MC_ENDIF();
6320 IEM_MC_ADVANCE_RIP_AND_FINISH();
6321 IEM_MC_END();
6322 break;
6323
6324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6325 }
6326}
6327
6328
6329/**
6330 * @opcode 0x9a
6331 */
6332FNIEMOP_DEF(iemOp_call_Ap)
6333{
6334 IEMOP_MNEMONIC(call_Ap, "call Ap");
6335 IEMOP_HLP_NO_64BIT();
6336
6337 /* Decode the far pointer address and pass it on to the far call C implementation. */
6338 uint32_t off32Seg;
6339 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6340 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6341 else
6342 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6343 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6345 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
6346 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
6347 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6348}
6349
6350
/** Opcode 0x9b. (aka fwait)
 * Checks for pending x87 exceptions / device-not-available conditions and
 * raises them if needed; otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6362
6363
6364/**
6365 * @opcode 0x9c
6366 */
6367FNIEMOP_DEF(iemOp_pushf_Fv)
6368{
6369 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6371 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6372 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6373}
6374
6375
6376/**
6377 * @opcode 0x9d
6378 */
6379FNIEMOP_DEF(iemOp_popf_Fv)
6380{
6381 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6382 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6383 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6384 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6385 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6386}
6387
6388
6389/**
6390 * @opcode 0x9e
6391 */
6392FNIEMOP_DEF(iemOp_sahf)
6393{
6394 IEMOP_MNEMONIC(sahf, "sahf");
6395 if ( IEM_IS_64BIT_CODE(pVCpu)
6396 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6397 IEMOP_RAISE_INVALID_OPCODE_RET();
6398 IEM_MC_BEGIN(0, 2, 0, 0);
6399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6400 IEM_MC_LOCAL(uint32_t, u32Flags);
6401 IEM_MC_LOCAL(uint32_t, EFlags);
6402 IEM_MC_FETCH_EFLAGS(EFlags);
6403 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6404 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6405 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6406 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6407 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6408 IEM_MC_COMMIT_EFLAGS(EFlags);
6409 IEM_MC_ADVANCE_RIP_AND_FINISH();
6410 IEM_MC_END();
6411}
6412
6413
6414/**
6415 * @opcode 0x9f
6416 */
6417FNIEMOP_DEF(iemOp_lahf)
6418{
6419 IEMOP_MNEMONIC(lahf, "lahf");
6420 if ( IEM_IS_64BIT_CODE(pVCpu)
6421 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6422 IEMOP_RAISE_INVALID_OPCODE_RET();
6423 IEM_MC_BEGIN(0, 1, 0, 0);
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425 IEM_MC_LOCAL(uint8_t, u8Flags);
6426 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6427 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6428 IEM_MC_ADVANCE_RIP_AND_FINISH();
6429 IEM_MC_END();
6430}
6431
6432
6433/**
6434 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6435 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6436 * Will return/throw on failures.
6437 * @param a_GCPtrMemOff The variable to store the offset in.
6438 */
6439#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6440 do \
6441 { \
6442 switch (pVCpu->iem.s.enmEffAddrMode) \
6443 { \
6444 case IEMMODE_16BIT: \
6445 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6446 break; \
6447 case IEMMODE_32BIT: \
6448 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6449 break; \
6450 case IEMMODE_64BIT: \
6451 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6452 break; \
6453 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6454 } \
6455 } while (0)
6456
6457/**
6458 * @opcode 0xa0
6459 */
6460FNIEMOP_DEF(iemOp_mov_AL_Ob)
6461{
6462 /*
6463 * Get the offset.
6464 */
6465 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6466 RTGCPTR GCPtrMemOff;
6467 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6468
6469 /*
6470 * Fetch AL.
6471 */
6472 IEM_MC_BEGIN(0, 1, 0, 0);
6473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6474 IEM_MC_LOCAL(uint8_t, u8Tmp);
6475 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6476 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6477 IEM_MC_ADVANCE_RIP_AND_FINISH();
6478 IEM_MC_END();
6479}
6480
6481
6482/**
6483 * @opcode 0xa1
6484 */
6485FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6486{
6487 /*
6488 * Get the offset.
6489 */
6490 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6491 RTGCPTR GCPtrMemOff;
6492 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6493
6494 /*
6495 * Fetch rAX.
6496 */
6497 switch (pVCpu->iem.s.enmEffOpSize)
6498 {
6499 case IEMMODE_16BIT:
6500 IEM_MC_BEGIN(0, 1, 0, 0);
6501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6502 IEM_MC_LOCAL(uint16_t, u16Tmp);
6503 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6504 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6505 IEM_MC_ADVANCE_RIP_AND_FINISH();
6506 IEM_MC_END();
6507 break;
6508
6509 case IEMMODE_32BIT:
6510 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6512 IEM_MC_LOCAL(uint32_t, u32Tmp);
6513 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6514 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6515 IEM_MC_ADVANCE_RIP_AND_FINISH();
6516 IEM_MC_END();
6517 break;
6518
6519 case IEMMODE_64BIT:
6520 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6522 IEM_MC_LOCAL(uint64_t, u64Tmp);
6523 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6524 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6525 IEM_MC_ADVANCE_RIP_AND_FINISH();
6526 IEM_MC_END();
6527 break;
6528
6529 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6530 }
6531}
6532
6533
6534/**
6535 * @opcode 0xa2
6536 */
6537FNIEMOP_DEF(iemOp_mov_Ob_AL)
6538{
6539 /*
6540 * Get the offset.
6541 */
6542 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6543 RTGCPTR GCPtrMemOff;
6544 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6545
6546 /*
6547 * Store AL.
6548 */
6549 IEM_MC_BEGIN(0, 1, 0, 0);
6550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6551 IEM_MC_LOCAL(uint8_t, u8Tmp);
6552 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6553 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6554 IEM_MC_ADVANCE_RIP_AND_FINISH();
6555 IEM_MC_END();
6556}
6557
6558
6559/**
6560 * @opcode 0xa3
6561 */
6562FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6563{
6564 /*
6565 * Get the offset.
6566 */
6567 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6568 RTGCPTR GCPtrMemOff;
6569 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6570
6571 /*
6572 * Store rAX.
6573 */
6574 switch (pVCpu->iem.s.enmEffOpSize)
6575 {
6576 case IEMMODE_16BIT:
6577 IEM_MC_BEGIN(0, 1, 0, 0);
6578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6579 IEM_MC_LOCAL(uint16_t, u16Tmp);
6580 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6581 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6582 IEM_MC_ADVANCE_RIP_AND_FINISH();
6583 IEM_MC_END();
6584 break;
6585
6586 case IEMMODE_32BIT:
6587 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6589 IEM_MC_LOCAL(uint32_t, u32Tmp);
6590 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6591 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6592 IEM_MC_ADVANCE_RIP_AND_FINISH();
6593 IEM_MC_END();
6594 break;
6595
6596 case IEMMODE_64BIT:
6597 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6599 IEM_MC_LOCAL(uint64_t, u64Tmp);
6600 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6601 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6602 IEM_MC_ADVANCE_RIP_AND_FINISH();
6603 IEM_MC_END();
6604 break;
6605
6606 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6607 }
6608}
6609
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv (non-rep forms).
 * Copies one element from iEffSeg:[rSI] to ES:[rDI], then advances or
 * retreats both index registers by the element size according to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6629
6630/**
6631 * @opcode 0xa4
6632 */
6633FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6634{
6635 /*
6636 * Use the C implementation if a repeat prefix is encountered.
6637 */
6638 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6639 {
6640 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6642 switch (pVCpu->iem.s.enmEffAddrMode)
6643 {
6644 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6645 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6646 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6648 }
6649 }
6650
6651 /*
6652 * Sharing case implementation with movs[wdq] below.
6653 */
6654 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6655 switch (pVCpu->iem.s.enmEffAddrMode)
6656 {
6657 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6658 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6659 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6661 }
6662}
6663
6664
6665/**
6666 * @opcode 0xa5
6667 */
6668FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6669{
6670
6671 /*
6672 * Use the C implementation if a repeat prefix is encountered.
6673 */
6674 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6675 {
6676 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6678 switch (pVCpu->iem.s.enmEffOpSize)
6679 {
6680 case IEMMODE_16BIT:
6681 switch (pVCpu->iem.s.enmEffAddrMode)
6682 {
6683 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6684 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6685 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6687 }
6688 break;
6689 case IEMMODE_32BIT:
6690 switch (pVCpu->iem.s.enmEffAddrMode)
6691 {
6692 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6693 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6694 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6696 }
6697 case IEMMODE_64BIT:
6698 switch (pVCpu->iem.s.enmEffAddrMode)
6699 {
6700 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6701 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6702 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6703 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6704 }
6705 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6706 }
6707 }
6708
6709 /*
6710 * Annoying double switch here.
6711 * Using ugly macro for implementing the cases, sharing it with movsb.
6712 */
6713 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6714 switch (pVCpu->iem.s.enmEffOpSize)
6715 {
6716 case IEMMODE_16BIT:
6717 switch (pVCpu->iem.s.enmEffAddrMode)
6718 {
6719 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6720 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6721 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6723 }
6724 break;
6725
6726 case IEMMODE_32BIT:
6727 switch (pVCpu->iem.s.enmEffAddrMode)
6728 {
6729 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6730 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6731 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6732 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6733 }
6734 break;
6735
6736 case IEMMODE_64BIT:
6737 switch (pVCpu->iem.s.enmEffAddrMode)
6738 {
6739 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6740 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6741 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6742 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6743 }
6744 break;
6745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6746 }
6747}
6748
6749#undef IEM_MOVS_CASE
6750
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv (non-rep forms).
 * Compares one element at iEffSeg:[rSI] with one at ES:[rDI] via
 * iemAImpl_cmp_uXX (updating EFLAGS), then advances or retreats both index
 * registers by the element size according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6778
6779/**
6780 * @opcode 0xa6
6781 */
6782FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6783{
6784
6785 /*
6786 * Use the C implementation if a repeat prefix is encountered.
6787 */
6788 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6789 {
6790 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6792 switch (pVCpu->iem.s.enmEffAddrMode)
6793 {
6794 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6795 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6796 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6797 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6798 }
6799 }
6800 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6801 {
6802 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6804 switch (pVCpu->iem.s.enmEffAddrMode)
6805 {
6806 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6807 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6808 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6809 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6810 }
6811 }
6812
6813 /*
6814 * Sharing case implementation with cmps[wdq] below.
6815 */
6816 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6817 switch (pVCpu->iem.s.enmEffAddrMode)
6818 {
6819 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6820 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6821 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
6822 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6823 }
6824}
6825
6826
6827/**
6828 * @opcode 0xa7
6829 */
6830FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6831{
6832 /*
6833 * Use the C implementation if a repeat prefix is encountered.
6834 */
6835 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6836 {
6837 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6839 switch (pVCpu->iem.s.enmEffOpSize)
6840 {
6841 case IEMMODE_16BIT:
6842 switch (pVCpu->iem.s.enmEffAddrMode)
6843 {
6844 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6845 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6846 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6847 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6848 }
6849 break;
6850 case IEMMODE_32BIT:
6851 switch (pVCpu->iem.s.enmEffAddrMode)
6852 {
6853 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6854 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6855 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6857 }
6858 case IEMMODE_64BIT:
6859 switch (pVCpu->iem.s.enmEffAddrMode)
6860 {
6861 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6862 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6863 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6864 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6865 }
6866 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6867 }
6868 }
6869
6870 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6871 {
6872 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6874 switch (pVCpu->iem.s.enmEffOpSize)
6875 {
6876 case IEMMODE_16BIT:
6877 switch (pVCpu->iem.s.enmEffAddrMode)
6878 {
6879 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6880 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6881 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6882 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6883 }
6884 break;
6885 case IEMMODE_32BIT:
6886 switch (pVCpu->iem.s.enmEffAddrMode)
6887 {
6888 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6889 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6890 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6892 }
6893 case IEMMODE_64BIT:
6894 switch (pVCpu->iem.s.enmEffAddrMode)
6895 {
6896 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6897 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6898 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6900 }
6901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6902 }
6903 }
6904
6905 /*
6906 * Annoying double switch here.
6907 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6908 */
6909 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6910 switch (pVCpu->iem.s.enmEffOpSize)
6911 {
6912 case IEMMODE_16BIT:
6913 switch (pVCpu->iem.s.enmEffAddrMode)
6914 {
6915 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6916 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6917 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
6918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6919 }
6920 break;
6921
6922 case IEMMODE_32BIT:
6923 switch (pVCpu->iem.s.enmEffAddrMode)
6924 {
6925 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6926 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6927 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
6928 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6929 }
6930 break;
6931
6932 case IEMMODE_64BIT:
6933 switch (pVCpu->iem.s.enmEffAddrMode)
6934 {
6935 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6936 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
6937 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
6938 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6939 }
6940 break;
6941 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6942 }
6943}
6944
6945#undef IEM_CMPS_CASE
6946
6947/**
6948 * @opcode 0xa8
6949 */
6950FNIEMOP_DEF(iemOp_test_AL_Ib)
6951{
6952 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6953 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6954 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6955}
6956
6957
6958/**
6959 * @opcode 0xa9
6960 */
6961FNIEMOP_DEF(iemOp_test_eAX_Iz)
6962{
6963 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6964 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6965 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6966}
6967
6968
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Emits one non-REP STOS iteration: stores xAX (ValBits wide) to ES:[xDI]
 * and then steps xDI by ValBits/8, direction chosen by EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF set: walk downwards */ \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6985
6986/**
6987 * @opcode 0xaa
6988 */
6989FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6990{
6991 /*
6992 * Use the C implementation if a repeat prefix is encountered.
6993 */
6994 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6995 {
6996 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6997 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6998 switch (pVCpu->iem.s.enmEffAddrMode)
6999 {
7000 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
7001 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
7002 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
7003 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7004 }
7005 }
7006
7007 /*
7008 * Sharing case implementation with stos[wdq] below.
7009 */
7010 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7011 switch (pVCpu->iem.s.enmEffAddrMode)
7012 {
7013 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7014 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7015 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7016 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7017 }
7018}
7019
7020
7021/**
7022 * @opcode 0xab
7023 */
7024FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7025{
7026 /*
7027 * Use the C implementation if a repeat prefix is encountered.
7028 */
7029 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7030 {
7031 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7033 switch (pVCpu->iem.s.enmEffOpSize)
7034 {
7035 case IEMMODE_16BIT:
7036 switch (pVCpu->iem.s.enmEffAddrMode)
7037 {
7038 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
7039 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
7040 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
7041 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7042 }
7043 break;
7044 case IEMMODE_32BIT:
7045 switch (pVCpu->iem.s.enmEffAddrMode)
7046 {
7047 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
7048 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
7049 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
7050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7051 }
7052 case IEMMODE_64BIT:
7053 switch (pVCpu->iem.s.enmEffAddrMode)
7054 {
7055 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7056 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
7057 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
7058 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7059 }
7060 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7061 }
7062 }
7063
7064 /*
7065 * Annoying double switch here.
7066 * Using ugly macro for implementing the cases, sharing it with stosb.
7067 */
7068 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7069 switch (pVCpu->iem.s.enmEffOpSize)
7070 {
7071 case IEMMODE_16BIT:
7072 switch (pVCpu->iem.s.enmEffAddrMode)
7073 {
7074 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7075 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7076 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7078 }
7079 break;
7080
7081 case IEMMODE_32BIT:
7082 switch (pVCpu->iem.s.enmEffAddrMode)
7083 {
7084 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7085 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7086 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7088 }
7089 break;
7090
7091 case IEMMODE_64BIT:
7092 switch (pVCpu->iem.s.enmEffAddrMode)
7093 {
7094 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7095 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7096 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7097 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7098 }
7099 break;
7100 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7101 }
7102}
7103
7104#undef IEM_STOS_CASE
7105
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Emits one non-REP LODS iteration: loads ValBits from iEffSeg:[xSI] into
 * xAX and then steps xSI by ValBits/8, direction chosen by EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,  uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF set: walk downwards */ \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7122
7123/**
7124 * @opcode 0xac
7125 */
7126FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7127{
7128 /*
7129 * Use the C implementation if a repeat prefix is encountered.
7130 */
7131 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7132 {
7133 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7135 switch (pVCpu->iem.s.enmEffAddrMode)
7136 {
7137 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7138 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7139 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7140 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7141 }
7142 }
7143
7144 /*
7145 * Sharing case implementation with stos[wdq] below.
7146 */
7147 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7148 switch (pVCpu->iem.s.enmEffAddrMode)
7149 {
7150 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7151 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7152 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7154 }
7155}
7156
7157
7158/**
7159 * @opcode 0xad
7160 */
7161FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7162{
7163 /*
7164 * Use the C implementation if a repeat prefix is encountered.
7165 */
7166 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7167 {
7168 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7170 switch (pVCpu->iem.s.enmEffOpSize)
7171 {
7172 case IEMMODE_16BIT:
7173 switch (pVCpu->iem.s.enmEffAddrMode)
7174 {
7175 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7176 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7177 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7179 }
7180 break;
7181 case IEMMODE_32BIT:
7182 switch (pVCpu->iem.s.enmEffAddrMode)
7183 {
7184 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7185 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7186 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7187 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7188 }
7189 case IEMMODE_64BIT:
7190 switch (pVCpu->iem.s.enmEffAddrMode)
7191 {
7192 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7193 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7194 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7195 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7196 }
7197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7198 }
7199 }
7200
7201 /*
7202 * Annoying double switch here.
7203 * Using ugly macro for implementing the cases, sharing it with lodsb.
7204 */
7205 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7206 switch (pVCpu->iem.s.enmEffOpSize)
7207 {
7208 case IEMMODE_16BIT:
7209 switch (pVCpu->iem.s.enmEffAddrMode)
7210 {
7211 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7212 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7213 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7215 }
7216 break;
7217
7218 case IEMMODE_32BIT:
7219 switch (pVCpu->iem.s.enmEffAddrMode)
7220 {
7221 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7222 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7223 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7225 }
7226 break;
7227
7228 case IEMMODE_64BIT:
7229 switch (pVCpu->iem.s.enmEffAddrMode)
7230 {
7231 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7232 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7233 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7234 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7235 }
7236 break;
7237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7238 }
7239}
7240
7241#undef IEM_LODS_CASE
7242
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Emits one non-REP SCAS iteration: compares xAX (ValBits wide) against
 * ES:[xDI] via iemAImpl_cmp_uNN (updating EFLAGS, discarding the result)
 * and then steps xDI by ValBits/8, direction chosen by EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { /* DF set: walk downwards */ \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7265
7266/**
7267 * @opcode 0xae
7268 */
7269FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7270{
7271 /*
7272 * Use the C implementation if a repeat prefix is encountered.
7273 */
7274 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7275 {
7276 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7278 switch (pVCpu->iem.s.enmEffAddrMode)
7279 {
7280 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
7281 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
7282 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
7283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7284 }
7285 }
7286 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7287 {
7288 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7289 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7290 switch (pVCpu->iem.s.enmEffAddrMode)
7291 {
7292 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
7293 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
7294 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
7295 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7296 }
7297 }
7298
7299 /*
7300 * Sharing case implementation with stos[wdq] below.
7301 */
7302 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7303 switch (pVCpu->iem.s.enmEffAddrMode)
7304 {
7305 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7306 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7307 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7309 }
7310}
7311
7312
7313/**
7314 * @opcode 0xaf
7315 */
7316FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7317{
7318 /*
7319 * Use the C implementation if a repeat prefix is encountered.
7320 */
7321 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7322 {
7323 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7325 switch (pVCpu->iem.s.enmEffOpSize)
7326 {
7327 case IEMMODE_16BIT:
7328 switch (pVCpu->iem.s.enmEffAddrMode)
7329 {
7330 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
7331 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
7332 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
7333 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7334 }
7335 break;
7336 case IEMMODE_32BIT:
7337 switch (pVCpu->iem.s.enmEffAddrMode)
7338 {
7339 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
7340 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
7341 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
7342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7343 }
7344 case IEMMODE_64BIT:
7345 switch (pVCpu->iem.s.enmEffAddrMode)
7346 {
7347 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7348 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
7349 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
7350 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7351 }
7352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7353 }
7354 }
7355 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7356 {
7357 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7358 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7359 switch (pVCpu->iem.s.enmEffOpSize)
7360 {
7361 case IEMMODE_16BIT:
7362 switch (pVCpu->iem.s.enmEffAddrMode)
7363 {
7364 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
7365 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
7366 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
7367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7368 }
7369 break;
7370 case IEMMODE_32BIT:
7371 switch (pVCpu->iem.s.enmEffAddrMode)
7372 {
7373 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
7374 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
7375 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
7376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7377 }
7378 case IEMMODE_64BIT:
7379 switch (pVCpu->iem.s.enmEffAddrMode)
7380 {
7381 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7382 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
7383 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
7384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7385 }
7386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7387 }
7388 }
7389
7390 /*
7391 * Annoying double switch here.
7392 * Using ugly macro for implementing the cases, sharing it with scasb.
7393 */
7394 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7395 switch (pVCpu->iem.s.enmEffOpSize)
7396 {
7397 case IEMMODE_16BIT:
7398 switch (pVCpu->iem.s.enmEffAddrMode)
7399 {
7400 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7401 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7402 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7404 }
7405 break;
7406
7407 case IEMMODE_32BIT:
7408 switch (pVCpu->iem.s.enmEffAddrMode)
7409 {
7410 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7411 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7412 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7414 }
7415 break;
7416
7417 case IEMMODE_64BIT:
7418 switch (pVCpu->iem.s.enmEffAddrMode)
7419 {
7420 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7421 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7422 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7423 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7424 }
7425 break;
7426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7427 }
7428}
7429
7430#undef IEM_SCAS_CASE
7431
7432/**
7433 * Common 'mov r8, imm8' helper.
7434 */
7435FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7436{
7437 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7438 IEM_MC_BEGIN(0, 0, 0, 0);
7439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7440 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7441 IEM_MC_ADVANCE_RIP_AND_FINISH();
7442 IEM_MC_END();
7443}
7444
7445
7446/**
7447 * @opcode 0xb0
7448 */
7449FNIEMOP_DEF(iemOp_mov_AL_Ib)
7450{
7451 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7452 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7453}
7454
7455
7456/**
7457 * @opcode 0xb1
7458 */
7459FNIEMOP_DEF(iemOp_CL_Ib)
7460{
7461 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7462 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7463}
7464
7465
7466/**
7467 * @opcode 0xb2
7468 */
7469FNIEMOP_DEF(iemOp_DL_Ib)
7470{
7471 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7472 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7473}
7474
7475
7476/**
7477 * @opcode 0xb3
7478 */
7479FNIEMOP_DEF(iemOp_BL_Ib)
7480{
7481 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7482 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7483}
7484
7485
7486/**
7487 * @opcode 0xb4
7488 */
7489FNIEMOP_DEF(iemOp_mov_AH_Ib)
7490{
7491 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7492 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7493}
7494
7495
7496/**
7497 * @opcode 0xb5
7498 */
7499FNIEMOP_DEF(iemOp_CH_Ib)
7500{
7501 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7502 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7503}
7504
7505
7506/**
7507 * @opcode 0xb6
7508 */
7509FNIEMOP_DEF(iemOp_DH_Ib)
7510{
7511 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7512 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7513}
7514
7515
7516/**
7517 * @opcode 0xb7
7518 */
7519FNIEMOP_DEF(iemOp_BH_Ib)
7520{
7521 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7522 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7523}
7524
7525
7526/**
7527 * Common 'mov regX,immX' helper.
7528 */
7529FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7530{
7531 switch (pVCpu->iem.s.enmEffOpSize)
7532 {
7533 case IEMMODE_16BIT:
7534 IEM_MC_BEGIN(0, 0, 0, 0);
7535 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7537 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
7538 IEM_MC_ADVANCE_RIP_AND_FINISH();
7539 IEM_MC_END();
7540 break;
7541
7542 case IEMMODE_32BIT:
7543 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7544 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7546 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
7547 IEM_MC_ADVANCE_RIP_AND_FINISH();
7548 IEM_MC_END();
7549 break;
7550
7551 case IEMMODE_64BIT:
7552 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7553 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
7554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7555 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
7556 IEM_MC_ADVANCE_RIP_AND_FINISH();
7557 IEM_MC_END();
7558 break;
7559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7560 }
7561}
7562
7563
7564/**
7565 * @opcode 0xb8
7566 */
7567FNIEMOP_DEF(iemOp_eAX_Iv)
7568{
7569 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
7570 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7571}
7572
7573
7574/**
7575 * @opcode 0xb9
7576 */
7577FNIEMOP_DEF(iemOp_eCX_Iv)
7578{
7579 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
7580 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7581}
7582
7583
7584/**
7585 * @opcode 0xba
7586 */
7587FNIEMOP_DEF(iemOp_eDX_Iv)
7588{
7589 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
7590 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7591}
7592
7593
7594/**
7595 * @opcode 0xbb
7596 */
7597FNIEMOP_DEF(iemOp_eBX_Iv)
7598{
7599 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
7600 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7601}
7602
7603
7604/**
7605 * @opcode 0xbc
7606 */
7607FNIEMOP_DEF(iemOp_eSP_Iv)
7608{
7609 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
7610 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7611}
7612
7613
7614/**
7615 * @opcode 0xbd
7616 */
7617FNIEMOP_DEF(iemOp_eBP_Iv)
7618{
7619 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
7620 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7621}
7622
7623
7624/**
7625 * @opcode 0xbe
7626 */
7627FNIEMOP_DEF(iemOp_eSI_Iv)
7628{
7629 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
7630 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7631}
7632
7633
7634/**
7635 * @opcode 0xbf
7636 */
7637FNIEMOP_DEF(iemOp_eDI_Iv)
7638{
7639 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
7640 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7641}
7642
7643
7644/**
7645 * @opcode 0xc0
7646 */
7647FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
7648{
7649 IEMOP_HLP_MIN_186();
7650 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7651 PCIEMOPSHIFTSIZES pImpl;
7652 switch (IEM_GET_MODRM_REG_8(bRm))
7653 {
7654 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
7655 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
7656 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
7657 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
7658 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
7659 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
7660 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
7661 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7662 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7663 }
7664 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7665
7666 if (IEM_IS_MODRM_REG_MODE(bRm))
7667 {
7668 /* register */
7669 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7670 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
7671 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7672 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7673 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7674 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7675 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7676 IEM_MC_REF_EFLAGS(pEFlags);
7677 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7678 IEM_MC_ADVANCE_RIP_AND_FINISH();
7679 IEM_MC_END();
7680 }
7681 else
7682 {
7683 /* memory */
7684 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
7685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7686 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7687
7688 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7690
7691 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7692 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7693 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7694
7695 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7696 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7697 IEM_MC_FETCH_EFLAGS(EFlags);
7698 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7699
7700 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
7701 IEM_MC_COMMIT_EFLAGS(EFlags);
7702 IEM_MC_ADVANCE_RIP_AND_FINISH();
7703 IEM_MC_END();
7704 }
7705}
7706
7707
7708/**
7709 * @opcode 0xc1
7710 */
7711FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
7712{
7713 IEMOP_HLP_MIN_186();
7714 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7715 PCIEMOPSHIFTSIZES pImpl;
7716 switch (IEM_GET_MODRM_REG_8(bRm))
7717 {
7718 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
7719 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
7720 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
7721 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
7722 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
7723 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
7724 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
7725 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7726 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7727 }
7728 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7729
7730 if (IEM_IS_MODRM_REG_MODE(bRm))
7731 {
7732 /* register */
7733 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7734 switch (pVCpu->iem.s.enmEffOpSize)
7735 {
7736 case IEMMODE_16BIT:
7737 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
7738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7739 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7740 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7741 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7742 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7743 IEM_MC_REF_EFLAGS(pEFlags);
7744 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7745 IEM_MC_ADVANCE_RIP_AND_FINISH();
7746 IEM_MC_END();
7747 break;
7748
7749 case IEMMODE_32BIT:
7750 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
7751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7752 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7753 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7754 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7755 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7756 IEM_MC_REF_EFLAGS(pEFlags);
7757 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7758 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
7759 IEM_MC_ADVANCE_RIP_AND_FINISH();
7760 IEM_MC_END();
7761 break;
7762
7763 case IEMMODE_64BIT:
7764 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7766 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7767 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7768 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7769 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7770 IEM_MC_REF_EFLAGS(pEFlags);
7771 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7772 IEM_MC_ADVANCE_RIP_AND_FINISH();
7773 IEM_MC_END();
7774 break;
7775
7776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7777 }
7778 }
7779 else
7780 {
7781 /* memory */
7782 switch (pVCpu->iem.s.enmEffOpSize)
7783 {
7784 case IEMMODE_16BIT:
7785 IEM_MC_BEGIN(3, 3, 0, 0);
7786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7788
7789 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7791
7792 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7793 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7794 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7795
7796 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7797 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7798 IEM_MC_FETCH_EFLAGS(EFlags);
7799 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7800
7801 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
7802 IEM_MC_COMMIT_EFLAGS(EFlags);
7803 IEM_MC_ADVANCE_RIP_AND_FINISH();
7804 IEM_MC_END();
7805 break;
7806
7807 case IEMMODE_32BIT:
7808 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
7809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7811
7812 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7814
7815 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7816 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7817 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7818
7819 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7820 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7821 IEM_MC_FETCH_EFLAGS(EFlags);
7822 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7823
7824 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
7825 IEM_MC_COMMIT_EFLAGS(EFlags);
7826 IEM_MC_ADVANCE_RIP_AND_FINISH();
7827 IEM_MC_END();
7828 break;
7829
7830 case IEMMODE_64BIT:
7831 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
7832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7834
7835 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7837
7838 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7839 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7840 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7841
7842 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7843 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
7844 IEM_MC_FETCH_EFLAGS(EFlags);
7845 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7846
7847 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
7848 IEM_MC_COMMIT_EFLAGS(EFlags);
7849 IEM_MC_ADVANCE_RIP_AND_FINISH();
7850 IEM_MC_END();
7851 break;
7852
7853 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7854 }
7855 }
7856}
7857
7858
7859/**
7860 * @opcode 0xc2
7861 */
7862FNIEMOP_DEF(iemOp_retn_Iw)
7863{
7864 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
7865 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7866 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7868 switch (pVCpu->iem.s.enmEffOpSize)
7869 {
7870 case IEMMODE_16BIT:
7871 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
7872 case IEMMODE_32BIT:
7873 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
7874 case IEMMODE_64BIT:
7875 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
7876 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7877 }
7878}
7879
7880
/**
 * @opcode 0xc3
 *
 * Plain near return: retn.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defer to the C implementation matching the effective operand size.
       Flagged as an indirect branch for the recompilers; each DEFER macro
       returns, so no break statements are needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7900
7901
/**
 * @opcode 0xc4
 *
 * Double duty opcode: LES Gv,Mp in 16/32-bit code, three byte VEX prefix in
 * 64-bit code (and with MOD=3 in 32-bit code).
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit code (acts as REX.W). */
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* Decode the inverted (one's complement) VEX register fields from
               the second and third prefix bytes. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* Dispatch on the VEX.mmmmm opcode map selector. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Not a VEX prefix: decode as LES Gv,Mp. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7971
7972
/**
 * @opcode 0xc5
 *
 * Double duty opcode: LDS Gv,Mp in 16/32-bit code, two byte VEX prefix in
 * 64-bit code (and with MOD=3 in 32-bit code).
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* The 2-byte form packs all inverted register bits into the same
               byte as the ModRM; it always implies the 0x0f opcode map. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Not a VEX prefix: decode as LDS Gv,Mp. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
8016
8017
/**
 * @opcode 0xc6
 *
 * Group 11, byte variant.  Only /0 (mov Eb,Ib) is defined; all other /reg
 * encodings raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address first; the immediate byte follows the ModRM/SIB/disp. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8051
8052
/**
 * @opcode 0xc7
 *
 * Group 11, word/dword/qword variant.  Only /0 (mov Ev,Iz) is defined; all
 * other /reg encodings raise \#UD.  The 64-bit form sign-extends a 32-bit
 * immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                /* Iz is a sign-extended 32-bit immediate in 64-bit mode. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The cbImm argument tells the EA calc how many immediate bytes follow. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still 4 immediate bytes: Iz is sign-extended from 32 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8140
8141
8142
8143
/**
 * @opcode 0xc8
 *
 * Make stack frame for procedure parameters: enter Iw,Ib.
 * Requires a 186 or later; deferred entirely to the C implementation.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8157
8158
/**
 * @opcode 0xc9
 *
 * Release stack frame set up by ENTER: leave.
 * Requires a 186 or later; deferred entirely to the C implementation.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8170
8171
/**
 * @opcode 0xca
 *
 * Far return, popping an additional @a Iw bytes off the stack: retf Iw.
 * Far returns may change CPL/mode, hence the extra CIMPL flags.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8183
8184
/**
 * @opcode 0xcb
 *
 * Plain far return: retf.  Shares the C implementation with retf Iw,
 * passing zero extra bytes to pop.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8195
8196
/**
 * @opcode 0xcc
 *
 * Breakpoint trap: int3 (the one-byte INT 3 encoding, \#BP).
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
8208
8209
/**
 * @opcode 0xcd
 *
 * Software interrupt with vector byte: int Ib.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, u8Int, IEMINT_INTN);
}
8222
8223
/**
 * @opcode 0xce
 *
 * Interrupt on overflow: into (raises \#OF if EFLAGS.OF is set).
 * Invalid in 64-bit mode; conditional, hence the extra branch flag.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
}
8235
8236
/**
 * @opcode 0xcf
 *
 * Interrupt return: iret.  May change mode/CPL and restore RFLAGS; pending
 * interrupts are checked before execution (IEM_CIMPL_F_CHECK_IRQ_BEFORE).
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
8248
8249
/**
 * @opcode 0xd0
 *
 * Group 2 shift/rotate on a byte operand with a fixed count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table from the /reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read/write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8307
8308
8309
/**
 * @opcode 0xd1
 *
 * Group 2 shift/rotate on a word/dword/qword operand with a fixed count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,1.  /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table from the /reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read/write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8448
8449
/**
 * @opcode 0xd2
 *
 * Group 2 shift/rotate on a byte operand with the count taken from CL:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,CL.  /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table from the /reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read/write, shift by CL in place, then commit. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8509
8510
/**
 * @opcode 0xd3
 *
 * Group 2 shift/rotate on a word/dword/qword operand with the count taken
 * from CL: rol/ror/rcl/rcr/shl/shr/sar Ev,CL.  /6 is undefined (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Select the assembly worker table from the /reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read/write, shift by CL in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8655
/**
 * @opcode 0xd4
 *
 * ASCII adjust AX after multiply: aam Ib.  Invalid in 64-bit mode.
 * An immediate of zero raises \#DE (division by the immediate base).
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
}
8669
8670
/**
 * @opcode 0xd5
 *
 * ASCII adjust AX before division: aad Ib.  Invalid in 64-bit mode.
 * Unlike AAM, a zero immediate is fine (no division involved).
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
}
8682
8683
/**
 * @opcode 0xd6
 *
 * Undocumented SALC instruction: set AL to 0xff if CF is set, 0x00 if clear.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8702
8703
/**
 * @opcode 0xd7
 *
 * Table look-up translation: xlat - AL = [effseg:([rR/eE]BX + AL)], with the
 * address width selected by the effective address mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Zero-extend AL and add it to BX to form the table index. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8754
8755
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM/\#MF as appropriate; on stack underflow (either register
 * empty) the underflow handling is applied instead of calling the worker.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8785
8786
8787/**
8788 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8789 * flags.
8790 *
8791 * @param bRm Mod R/M byte.
8792 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8793 */
8794FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8795{
8796 IEM_MC_BEGIN(3, 1, 0, 0);
8797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8798 IEM_MC_LOCAL(uint16_t, u16Fsw);
8799 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8800 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8801 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8802
8803 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8804 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8805 IEM_MC_PREPARE_FPU_USAGE();
8806 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8807 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8808 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8809 } IEM_MC_ELSE() {
8810 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8811 } IEM_MC_ENDIF();
8812 IEM_MC_ADVANCE_RIP_AND_FINISH();
8813
8814 IEM_MC_END();
8815}
8816
8817
8818/**
8819 * Common worker for FPU instructions working on ST0 and STn, only affecting
8820 * flags, and popping when done.
8821 *
8822 * @param bRm Mod R/M byte.
8823 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8824 */
8825FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8826{
8827 IEM_MC_BEGIN(3, 1, 0, 0);
8828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8829 IEM_MC_LOCAL(uint16_t, u16Fsw);
8830 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8831 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8832 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8833
8834 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8835 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8836 IEM_MC_PREPARE_FPU_USAGE();
8837 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8838 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8839 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
8840 } IEM_MC_ELSE() {
8841 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
8842 } IEM_MC_ENDIF();
8843 IEM_MC_ADVANCE_RIP_AND_FINISH();
8844
8845 IEM_MC_END();
8846}
8847
8848
/** Opcode 0xd8 11/0.  Result replaces ST0 (common ST0/STn worker). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1.  Result replaces ST0 (common ST0/STn worker). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2.  Flags-only compare (no register result). */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3.  Same compare worker as FCOM, but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4.  Result replaces ST0 (common ST0/STn worker). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5.  Reversed-operand subtract; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6.  Result replaces ST0 (common ST0/STn worker). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7.  Reversed-operand divide; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8911
8912
8913/**
8914 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8915 * the result in ST0.
8916 *
8917 * @param bRm Mod R/M byte.
8918 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8919 */
8920FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8921{
8922 IEM_MC_BEGIN(3, 3, 0, 0);
8923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8924 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8925 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8926 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8927 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8928 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8929
8930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8932
8933 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8934 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8935 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8936
8937 IEM_MC_PREPARE_FPU_USAGE();
8938 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8939 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8940 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
8941 } IEM_MC_ELSE() {
8942 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
8943 } IEM_MC_ENDIF();
8944 IEM_MC_ADVANCE_RIP_AND_FINISH();
8945
8946 IEM_MC_END();
8947}
8948
8949
/** Opcode 0xd8 !11/0.  Memory-operand form; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1.  Memory-operand form; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8964
8965
/** Opcode 0xd8 !11/2.
 * Flags-only compare of ST0 with an m32r operand; open-coded because the FSW
 * update needs the memory operand (seg:ptr) for FPU data-pointer recording. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8997
8998
/** Opcode 0xd8 !11/3.
 * Same as iemOp_fcom_m32r but pops the FPU stack after the FSW update (both
 * in the success and underflow paths). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9030
9031
/** Opcode 0xd8 !11/4.  Memory-operand form; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.  Reversed-operand subtract; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.  Memory-operand form; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.  Reversed-operand divide; result replaces ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9062
9063
9064/**
9065 * @opcode 0xd8
9066 */
9067FNIEMOP_DEF(iemOp_EscF0)
9068{
9069 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9070 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9071
9072 if (IEM_IS_MODRM_REG_MODE(bRm))
9073 {
9074 switch (IEM_GET_MODRM_REG_8(bRm))
9075 {
9076 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
9077 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
9078 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
9079 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9080 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
9081 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9082 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
9083 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9084 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9085 }
9086 }
9087 else
9088 {
9089 switch (IEM_GET_MODRM_REG_8(bRm))
9090 {
9091 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
9092 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
9093 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
9094 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9095 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
9096 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9097 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
9098 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9100 }
9101 }
9102}
9103
9104
/** Opcode 0xd9 /0 mem32real
 * Pushes an m32r value (converted to R80) onto the FPU stack.  The push
 * requires the register that becomes the new TOP (currently ST7) to be
 * empty, otherwise stack-push-overflow handling kicks in.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9135
9136
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to memory as r32.  The destination is mapped for writing up
 * front; on an empty ST0 with IM masked, a negative QNaN is written instead
 * before raising the underflow handling. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9170
9171
/** Opcode 0xd9 !11/3
 * Same as iemOp_fst_m32r, but pops the stack after the store / underflow
 * handling. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9205
9206
/** Opcode 0xd9 !11/4
 * Loads the FPU environment from memory; the heavy lifting (14 vs 28 byte
 * layout, depending on operand size) is done by the iemCImpl_fldenv worker. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9224
9225
9226/** Opcode 0xd9 !11/5 */
9227FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9228{
9229 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9230 IEM_MC_BEGIN(1, 1, 0, 0);
9231 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9232 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9233
9234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9235 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9236 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9237
9238 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9239 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9240
9241 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
9242 IEM_MC_END();
9243}
9244
9245
/** Opcode 0xd9 !11/6
 * Stores the FPU environment (14/28 bytes by operand size) via the
 * iemCImpl_fnstenv worker.
 * NOTE(review): the stats mnemonic string says "fstenv" although this entry
 * is the no-wait FNSTENV form — presumably deliberate since FSTENV is just
 * FNSTENV with a WAIT prefix; confirm before changing. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9263
9264
/** Opcode 0xd9 !11/7
 * Stores the current FPU control word to the 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9281
9282
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: no data operation, but still raises \#NM/\#MF and updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9298
9299
/** Opcode 0xd9 11/0 stN
 * Pushes a copy of STn onto the stack (underflow handling if STn is empty). */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9324
9325
/** Opcode 0xd9 11/3 stN
 * Exchanges ST0 and STn: STn gets the old ST0, ST0 is set from the old STn
 * with C1 set in the result FSW.  The empty-register case is handed to the
 * iemCImpl_fxch_underflow worker. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9354
9355
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * Copies ST0 to STn and pops.  The STn==ST0 case is special-cased (no copy,
 * just FSW update + pop). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9402
9403
9404/**
9405 * Common worker for FPU instructions working on ST0 and replaces it with the
9406 * result, i.e. unary operators.
9407 *
9408 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9409 */
9410FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
9411{
9412 IEM_MC_BEGIN(2, 1, 0, 0);
9413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9414 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9415 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9416 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9417
9418 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9419 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9420 IEM_MC_PREPARE_FPU_USAGE();
9421 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9422 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
9423 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9424 } IEM_MC_ELSE() {
9425 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9426 } IEM_MC_ENDIF();
9427 IEM_MC_ADVANCE_RIP_AND_FINISH();
9428
9429 IEM_MC_END();
9430}
9431
9432
/** Opcode 0xd9 0xe0.  Unary op on ST0 via the common worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.  Unary op on ST0 via the common worker. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9447
9448
/** Opcode 0xd9 0xe4.
 * Flags-only test of ST0; UINT8_MAX in the underflow path means no result
 * register is involved. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9472
9473
/** Opcode 0xd9 0xe5.
 * Examines ST0 unconditionally — unlike FTST there is no empty-register
 * branch, since FXAM classifies empty registers too (the worker just gets a
 * plain reference to the register). */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9494
9495
9496/**
9497 * Common worker for FPU instructions pushing a constant onto the FPU stack.
9498 *
9499 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9500 */
9501FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
9502{
9503 IEM_MC_BEGIN(1, 1, 0, 0);
9504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9505 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9506 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9507
9508 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9509 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9510 IEM_MC_PREPARE_FPU_USAGE();
9511 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
9512 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
9513 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
9514 } IEM_MC_ELSE() {
9515 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
9516 } IEM_MC_ENDIF();
9517 IEM_MC_ADVANCE_RIP_AND_FINISH();
9518
9519 IEM_MC_END();
9520}
9521
9522
/** Opcode 0xd9 0xe8.  Pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.  Pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.  Pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.  Pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.  Pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.  Pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.  Pushes a constant via the common worker. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
9575
9576
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
9590
9591
9592/**
9593 * Common worker for FPU instructions working on STn and ST0, storing the result
9594 * in STn, and popping the stack unless IE, DE or ZE was raised.
9595 *
9596 * @param bRm Mod R/M byte.
9597 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9598 */
9599FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9600{
9601 IEM_MC_BEGIN(3, 1, 0, 0);
9602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9603 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9604 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9605 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9606 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9607
9608 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9609 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9610
9611 IEM_MC_PREPARE_FPU_USAGE();
9612 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
9613 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9614 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9615 } IEM_MC_ELSE() {
9616 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
9617 } IEM_MC_ENDIF();
9618 IEM_MC_ADVANCE_RIP_AND_FINISH();
9619
9620 IEM_MC_END();
9621}
9622
9623
/** Opcode 0xd9 0xf1.
 * Result is stored in ST1 and the stack is popped (common STn/ST0 pop
 * worker with STn fixed to 1). */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9630
9631
9632/**
9633 * Common worker for FPU instructions working on ST0 and having two outputs, one
9634 * replacing ST0 and one pushed onto the stack.
9635 *
9636 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9637 */
9638FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
9639{
9640 IEM_MC_BEGIN(2, 1, 0, 0);
9641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9642 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
9643 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
9644 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9645
9646 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9647 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9648 IEM_MC_PREPARE_FPU_USAGE();
9649 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9650 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
9651 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
9652 } IEM_MC_ELSE() {
9653 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
9654 } IEM_MC_ENDIF();
9655 IEM_MC_ADVANCE_RIP_AND_FINISH();
9656
9657 IEM_MC_END();
9658}
9659
9660
/** Opcode 0xd9 0xf2.  Two results: replaces ST0 and pushes a value. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3.  Result stored in ST1, stack popped. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.  Two results: replaces ST0 and pushes a value. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.  Result replaces ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9691
9692
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only the TOP field moves; no register content or tag is touched. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9713
9714
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only the TOP field moves; no register content or tag is touched. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9735
9736
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    /* Result stored back into ST0; no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
9743
9744
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    /* Result stored in ST1, then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
9751
9752
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    /* Plain unary operation on ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9759
9760
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    /* Two-result worker: ST0 is replaced and a second value is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9767
9768
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    /* Plain unary operation on ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9775
9776
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    /* Result stored back into ST0; ST1 is only read. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9783
9784
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    /* Plain unary operation on ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9791
9792
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    /* Plain unary operation on ST0. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9799
9800
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 register-form opcodes 0xe0..0xff (ModR/M reg
 * fields 4..7); indexed by (bRm - 0xe0).  The iemOp_Invalid slots are opcodes
 * with no assigned instruction. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
9837
9838
9839/**
9840 * @opcode 0xd9
9841 */
9842FNIEMOP_DEF(iemOp_EscF1)
9843{
9844 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9845 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
9846
9847 if (IEM_IS_MODRM_REG_MODE(bRm))
9848 {
9849 switch (IEM_GET_MODRM_REG_8(bRm))
9850 {
9851 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
9852 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
9853 case 2:
9854 if (bRm == 0xd0)
9855 return FNIEMOP_CALL(iemOp_fnop);
9856 IEMOP_RAISE_INVALID_OPCODE_RET();
9857 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
9858 case 4:
9859 case 5:
9860 case 6:
9861 case 7:
9862 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
9863 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
9864 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9865 }
9866 }
9867 else
9868 {
9869 switch (IEM_GET_MODRM_REG_8(bRm))
9870 {
9871 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
9872 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
9873 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
9874 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
9875 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
9876 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
9877 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
9878 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
9879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9880 }
9881 }
9882}
9883
9884
/** Opcode 0xda 11/0.
 * FCMOVB: copy ST(i) to ST0 when CF is set; stack underflow when either
 * register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9909
9910
/** Opcode 0xda 11/1.
 * FCMOVE: copy ST(i) to ST0 when ZF is set; stack underflow when either
 * register is empty. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9935
9936
/** Opcode 0xda 11/2.
 * FCMOVBE: copy ST(i) to ST0 when CF or ZF is set; stack underflow when
 * either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9961
9962
/** Opcode 0xda 11/3.
 * FCMOVU: copy ST(i) to ST0 when PF is set (unordered); stack underflow when
 * either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9987
9988
9989/**
9990 * Common worker for FPU instructions working on ST0 and ST1, only affecting
9991 * flags, and popping twice when done.
9992 *
9993 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9994 */
9995FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9996{
9997 IEM_MC_BEGIN(3, 1, 0, 0);
9998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9999 IEM_MC_LOCAL(uint16_t, u16Fsw);
10000 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10001 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10002 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10003
10004 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10005 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10006
10007 IEM_MC_PREPARE_FPU_USAGE();
10008 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
10009 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10010 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10011 } IEM_MC_ELSE() {
10012 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
10013 } IEM_MC_ENDIF();
10014 IEM_MC_ADVANCE_RIP_AND_FINISH();
10015
10016 IEM_MC_END();
10017}
10018
10019
/** Opcode 0xda 0xe9.
 * FUCOMPP: unordered compare ST0 with ST1, then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10026
10027
10028/**
10029 * Common worker for FPU instructions working on ST0 and an m32i, and storing
10030 * the result in ST0.
10031 *
10032 * @param bRm Mod R/M byte.
10033 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10034 */
10035FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
10036{
10037 IEM_MC_BEGIN(3, 3, 0, 0);
10038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10039 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10040 IEM_MC_LOCAL(int32_t, i32Val2);
10041 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10042 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10043 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
10044
10045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10047
10048 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10049 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10050 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10051
10052 IEM_MC_PREPARE_FPU_USAGE();
10053 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10054 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
10055 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10056 } IEM_MC_ELSE() {
10057 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10058 } IEM_MC_ENDIF();
10059 IEM_MC_ADVANCE_RIP_AND_FINISH();
10060
10061 IEM_MC_END();
10062}
10063
10064
/** Opcode 0xda !11/0.
 * FIADD m32i: ST0 += (signed 32-bit integer from memory). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10071
10072
/** Opcode 0xda !11/1.
 * FIMUL m32i: ST0 *= (signed 32-bit integer from memory). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10079
10080
/** Opcode 0xda !11/2.
 * FICOM m32i: compare ST0 with a signed 32-bit memory integer; only FSW is
 * updated, no stack register is written and nothing is popped. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10112
10113
/** Opcode 0xda !11/3.
 * FICOMP m32i: same as FICOM m32i but pops ST0 afterwards (also on the
 * underflow path). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10145
10146
/** Opcode 0xda !11/4.
 * FISUB m32i: ST0 -= (signed 32-bit integer from memory). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10153
10154
/** Opcode 0xda !11/5.
 * FISUBR m32i: ST0 = (signed 32-bit integer from memory) - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10161
10162
/** Opcode 0xda !11/6.
 * FIDIV m32i: ST0 /= (signed 32-bit integer from memory). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10169
10170
/** Opcode 0xda !11/7.
 * FIDIVR m32i: ST0 = (signed 32-bit integer from memory) / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10177
10178
10179/**
10180 * @opcode 0xda
10181 */
10182FNIEMOP_DEF(iemOp_EscF2)
10183{
10184 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10185 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
10186 if (IEM_IS_MODRM_REG_MODE(bRm))
10187 {
10188 switch (IEM_GET_MODRM_REG_8(bRm))
10189 {
10190 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
10191 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
10192 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
10193 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
10194 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10195 case 5:
10196 if (bRm == 0xe9)
10197 return FNIEMOP_CALL(iemOp_fucompp);
10198 IEMOP_RAISE_INVALID_OPCODE_RET();
10199 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10200 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10201 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10202 }
10203 }
10204 else
10205 {
10206 switch (IEM_GET_MODRM_REG_8(bRm))
10207 {
10208 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
10209 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
10210 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
10211 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
10212 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
10213 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
10214 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
10215 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
10216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10217 }
10218 }
10219}
10220
10221
/** Opcode 0xdb !11/0.
 * FILD m32i: convert a signed 32-bit memory integer to 80-bit real and push
 * it; pushing onto a non-empty slot (checked via relative register 7, the
 * slot the push will land in) raises stack overflow. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10252
10253
/** Opcode 0xdb !11/1.
 * FISTTP m32i (SSE3): store ST0 to memory as a signed 32-bit integer using
 * truncation, then pop.  The destination is mapped for writing before the
 * register check so memory faults surface first; with an empty ST0 and IM
 * masked, the integer-indefinite value is written instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10287
10288
/** Opcode 0xdb !11/2.
 * FIST m32i: store ST0 to memory as a signed 32-bit integer (rounding per
 * FCW), no pop.  With an empty ST0 and IM masked, the integer-indefinite
 * value is written instead. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10322
10323
/** Opcode 0xdb !11/3.
 * FISTP m32i: same as FIST m32i but pops ST0 afterwards (also on the
 * underflow path). */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10357
10358
/** Opcode 0xdb !11/5.
 * FLD m80r: push an 80-bit real loaded from memory; pushing onto a non-empty
 * slot (checked via relative register 7) raises stack overflow. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10389
10390
/** Opcode 0xdb !11/7.
 * FSTP m80r: store ST0 to memory as an 80-bit real, then pop.  Note the
 * mapping uses a 7-byte alignment mask (cbAlign) matching the odd natural
 * alignment rules for ten-byte operands; with an empty ST0 and IM masked a
 * negative QNaN (real indefinite) is written instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10424
10425
/** Opcode 0xdb 11/0.
 * FCMOVNB: copy ST(i) to ST0 when CF is clear; stack underflow when either
 * register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10450
10451
/** Opcode 0xdb 11/1.
 * FCMOVNE: copy ST(i) to ST0 when ZF is clear; stack underflow when either
 * register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10476
10477
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copy ST(i) to ST0 when both CF and ZF are clear; stack underflow
 * when either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10502
10503
/** Opcode 0xdb 11/3.
 * FCMOVNU: copy ST(i) to ST0 when PF is clear (not unordered); stack
 * underflow when either register is empty.  (The function/mnemonic name
 * uses a double 'n' for historical reasons.) */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10528
10529
/** Opcode 0xdb 0xe0.
 * FNENI: 8087-only interrupt-enable instruction; treated as a no-op on
 * later CPUs (only the #NM check and RIP advance are performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10540
10541
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087-only interrupt-disable instruction; treated as a no-op on
 * later CPUs (only the #NM check and RIP advance are performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10552
10553
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FSW exception flags without checking for pending FPU
 * exceptions first (no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10566
10567
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU without checking for pending exceptions
 * (fCheckXcpts=false); deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
10575
10576
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287-only; ignored (no-op) on later CPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10587
10588
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL-only; we follow newer CPUs and raise #UD (the ignore-as-nop
 * variant is kept in the disabled branch for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10604
10605
/** Opcode 0xdb 11/5.
 * FUCOMI: unordered compare ST0 with ST(i), setting EFLAGS; no pop.
 * NOTE(review): the fPop flag appears to be packed into the same argument as
 * the FPU opcode word — presumably in bits above it; confirm against
 * iemCImpl_fcomi_fucomi. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10614
10615
/** Opcode 0xdb 11/6.
 * FCOMI: ordered compare ST0 with ST(i), setting EFLAGS; no pop.
 * NOTE(review): the fPop flag appears to be packed into the same argument as
 * the FPU opcode word — presumably in bits above it; confirm against
 * iemCImpl_fcomi_fucomi. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10624
10625
10626/**
10627 * @opcode 0xdb
10628 */
10629FNIEMOP_DEF(iemOp_EscF3)
10630{
10631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10632 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
10633 if (IEM_IS_MODRM_REG_MODE(bRm))
10634 {
10635 switch (IEM_GET_MODRM_REG_8(bRm))
10636 {
10637 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
10638 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
10639 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
10640 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
10641 case 4:
10642 switch (bRm)
10643 {
10644 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
10645 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
10646 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
10647 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
10648 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
10649 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
10650 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
10651 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
10652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10653 }
10654 break;
10655 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
10656 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
10657 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10658 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10659 }
10660 }
10661 else
10662 {
10663 switch (IEM_GET_MODRM_REG_8(bRm))
10664 {
10665 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
10666 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
10667 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
10668 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
10669 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10670 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
10671 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10672 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
10673 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10674 }
10675 }
10676}
10677
10678
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      called as pfnAImpl(&FpuRes, &STn, &ST0).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only call the assembly worker if both STn and ST0 hold values;
       otherwise record a stack underflow against STn. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10709
10710
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10757
10758
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         Mod R/M byte, used for the effective address calculation.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly),
 *                      called as pfnImpl(&FpuRes, &ST0, &m64r).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Factor1,                1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U,   pr64Factor2,    r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand into a local before touching FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10793
10794
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10809
10810
/** Opcode 0xdc !11/2.
 * Compares ST0 with a 64-bit floating point memory operand, updating only the
 * FSW condition codes (no result is stored). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10842
10843
/** Opcode 0xdc !11/3.
 * Same as FCOM m64r (see iemOp_fcom_m64r) but pops ST0 afterwards, including
 * on the stack-underflow path. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10875
10876
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10907
10908
/**
 * @opcode 0xdc
 *
 * Decoder for the 0xdc FPU escape byte: register forms operate on STn,ST0,
 * memory forms on ST0 and an m64 real operand.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP): modrm in the low byte, low 3 bits of
       the escape opcode in the high byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10947
10948
/** Opcode 0xdd !11/0.
 * Loads an m64 real from memory, converts it to 80-bit and pushes it onto the
 * FPU stack; records a push overflow if ST7 (the new slot) is occupied.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the slot the push will claim. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10979
10980
/** Opcode 0xdd !11/1 (the comment previously said !11/0; the EscF5 decoder
 * dispatches this for /1).
 * Stores ST0 to memory as a 64-bit integer using truncation and pops the
 * stack; on stack underflow with IM masked, stores the integer indefinite. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for write before the FPU state is touched so memory
       faults are raised first. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: only write the indefinite value if #IA is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11014
11015
/** Opcode 0xdd !11/2 (the comment previously said !11/0; the EscF5 decoder
 * dispatches this for /2).
 * Stores ST0 to memory as an m64 real without popping; on stack underflow
 * with IM masked, stores the negative QNaN indefinite. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11049
11050
/** Opcode 0xdd !11/3 (the comment previously said !11/0; the EscF5 decoder
 * dispatches this for /3).
 * Same as FST m64r (see iemOp_fst_m64r) but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11086
11087
/** Opcode 0xdd !11/4 (the comment previously said !11/0; the EscF5 decoder
 * dispatches this for /4).
 * Restores the full FPU state (m94/m108 byte image) from memory; the heavy
 * lifting is deferred to iemCImpl_frstor. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,              0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,                   1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11105
11106
/** Opcode 0xdd !11/6 (the comment previously said !11/0; the EscF5 decoder
 * dispatches this for /6).
 * Saves the full FPU state (m94/m108 byte image) to memory and then
 * reinitializes the FPU; deferred to iemCImpl_fnsave. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,              0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,                   1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11124
/** Opcode 0xdd !11/7 (the comment previously said !11/0; the EscF5 decoder
 * dispatches this for /7).
 * Stores the FPU status word to an m16 memory operand; does not wait for
 * pending exceptions (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11148
11149
/** Opcode 0xdd 11/0.
 * Marks STn as empty in the FPU tag word without touching the data registers
 * or TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11169
11170
/** Opcode 0xdd 11/2 (the comment previously said 11/1; the EscF5 decoder
 * dispatches this for /2).
 * Copies ST0 into STn without popping; records stack underflow against STn if
 * ST0 is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11193
11194
/** Opcode 0xdd 11/4 (the comment previously said 11/3; the EscF5 decoder
 * dispatches this for /4). */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/5 (the comment previously said 11/4; the EscF5 decoder
 * dispatches this for /5). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11209
11210
/**
 * @opcode 0xdd
 *
 * Decoder for the 0xdd FPU escape byte: register forms are FFREE/FST/FSTP/
 * FUCOM(P), memory forms the m64 real load/store plus FRSTOR/FNSAVE/FNSTSW.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP): modrm in the low byte, low 3 bits of
       the escape opcode in the high byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11249
11250
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1 (the comment previously said 11/0; the EscF6 decoder
 * dispatches this for /1). */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xde 0xd9.
 * Compares ST0 with ST1 and pops the stack twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11305
11306
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         Mod R/M byte, used for the effective address calculation.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      called as pfnAImpl(&FpuRes, &ST0, &i16Val2).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand into a local before touching FPU state. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11342
11343
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11358
11359
/** Opcode 0xde !11/2.
 * Compares ST0 with a 16-bit integer memory operand, updating only the FSW
 * condition codes (no result is stored). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11391
11392
/** Opcode 0xde !11/3.
 * Same as FICOM m16i (see iemOp_ficom_m16i) but pops ST0 afterwards,
 * including on the stack-underflow path. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11424
11425
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}


/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}


/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11456
11457
/**
 * @opcode 0xde
 *
 * Decoder for the 0xde FPU escape byte: register forms are the popping
 * arithmetic variants (FADDP etc.) plus FCOMPP, memory forms the m16i
 * integer arithmetic group.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (FOP): modrm in the low byte, low 3 bits of
       the escape opcode in the high byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only the 0xd9 encoding of /3 is valid (FCOMPP). */
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11498
11499
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* The "pop" half: just increment TOP, the slot was already tagged empty. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11519
11520
/** Opcode 0xdf 0xe0.
 * Stores the FPU status word into AX (no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11535
11536
11537/** Opcode 0xdf 11/5. */
11538FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
11539{
11540 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
11541 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11542 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11543 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11544}
11545
11546
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,STi - ordered compare of ST(0) with ST(i) setting EFLAGS,
 * then pop.  fUCmp=false selects the ordered variant of the common
 * fcomi/fucomi C implementation; bit 31 of the last argument requests
 * the stack pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11555
11556
/** Opcode 0xdf !11/0.
 * FILD m16i - load a 16-bit signed integer from memory, convert it to
 * 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes the new top after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Destination register not empty: FPU stack (push) overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11587
11588
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST(0) to memory as a 16-bit signed integer using
 * truncation (round toward zero regardless of FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching the FPU state so a #PF is raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store the integer indefinite value if #IA is masked, then
           signal stack underflow (still popping). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11622
11623
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST(0) to memory as a 16-bit signed integer (rounding
 * per FCW.RC), without popping. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching the FPU state so a #PF is raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode); /* no pop */
    } IEM_MC_ELSE() {
        /* ST(0) empty: store the integer indefinite value if #IA is masked, then
           signal stack underflow (no pop). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11657
11658
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) to memory as a 16-bit signed integer (rounding
 * per FCW.RC), then pop.  Same as FIST m16i except for the pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching the FPU state so a #PF is raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store the integer indefinite value if #IA is masked, then
           signal stack underflow (still popping). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11692
11693
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value from memory, convert it
 * to 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes the new top after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Destination register not empty: FPU stack (push) overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11724
11725
/** Opcode 0xdf !11/5.
 * FILD m64i - load a 64-bit signed integer from memory, convert it to
 * 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes the new top after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Destination register not empty: FPU stack (push) overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11756
11757
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) to memory as an 80-bit packed BCD value,
 * then pop. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte destination; explicit-size mapping with 7 as alignment mask. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store the BCD indefinite value if #IA is masked, then
           signal stack underflow (still popping). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11791
11792
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) to memory as a 64-bit signed integer (rounding
 * per FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching the FPU state so a #PF is raised first. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: store the integer indefinite value if #IA is masked, then
           signal stack underflow (still popping). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11826
11827
/**
 * @opcode 0xdf
 *
 * FPU escape 7 dispatcher: splits on register vs. memory form of the
 * ModR/M byte and then on the reg field.  Also records the 11-bit FPU
 * opcode (for FOP reporting) before dispatching.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Low 3 bits of the opcode byte (0xdf) + ModR/M = FPU opcode. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)                            /* only DF E0 (fnstsw ax) is defined in this group */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11868
11869
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb - decrement rCX and take the short jump while rCX is
 * non-zero AND ZF is clear.  The counter width follows the effective
 * address size, hence the three-way switch.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);   /* the decrement does not touch EFLAGS */
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11920
11921
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb - decrement rCX and take the short jump while rCX is
 * non-zero AND ZF is set.  The counter width follows the effective
 * address size.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);   /* the decrement does not touch EFLAGS */
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11972
11973
/**
 * @opcode 0xe2
 *
 * LOOP Jb - decrement rCX and take the short jump while rCX is non-zero.
 * Contains a logging-only shortcut for self-targeting LOOP $-2 busy-wait
 * loops (detected by the displacement equalling minus the instruction
 * length), which simply clears rCX instead of iterating.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     *     use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     *     the loop causes guest crashes, but when logging it's nice to skip a few million
     *     lines of useless output. */
#if defined(LOG_ENABLED)
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);   /* terminate the busy-wait loop at once */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
#endif

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);   /* the decrement does not touch EFLAGS */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12064
12065
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb - short jump if the counter register is zero.
 * The counter width (CX/ECX/RCX) follows the effective address size;
 * note the inverted branch sense vs. the LOOP family: jump when zero.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();    /* CX != 0: fall through */
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* CX == 0: take the jump */
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12113
12114
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the immediate-addressed I/O port into AL.
 * Defers to iemCImpl_in; the last argument combines a 0x80 'immediate
 * port' flag with the effective address mode. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12124
12125
/** Opcode 0xe5.
 * IN eAX,Ib - read a word or dword (per effective operand size) from the
 * immediate-addressed I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12136
12137
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate-addressed I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12147
12148
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX (per effective operand size) to the
 * immediate-addressed I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12159
12160
/**
 * @opcode 0xe8
 *
 * CALL Jv - near relative call.  In 64-bit mode the immediate is a
 * sign-extended 32-bit displacement (there is no rel64 form); the
 * actual push/branch work is deferred to the per-width C implementation.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* rel32, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12191
12192
/**
 * @opcode 0xe9
 *
 * JMP Jv - near relative jump.  The 64-bit case shares the 32-bit path
 * since the encoding is a rel32 displacement in both modes.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:    /* same rel32 encoding as the 32-bit form */
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12222
12223
/**
 * @opcode 0xea
 *
 * JMP Ap - direct far jump (ptr16:16 / ptr16:32).  Invalid in 64-bit
 * mode (IEMOP_HLP_NO_64BIT); decodes the offset (width per operand
 * size) then the selector, and defers to the far-jump C implementation.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
12244
12245
/**
 * @opcode 0xeb
 *
 * JMP Jb - short relative jump with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
12260
12261
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port addressed by DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12270
12271
/** Opcode 0xed.
 * IN eAX,DX - read a word or dword (per effective operand size) from the
 * I/O port addressed by DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12281
12282
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port addressed by DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12291
12292
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the I/O port
 * addressed by DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12302
12303
/**
 * @opcode 0xf0
 *
 * LOCK prefix - records IEM_OP_PRF_LOCK (unless the VM is configured to
 * disregard LOCK, see IEM_F_X86_DISREGARD_LOCK) and continues decoding
 * with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12316
12317
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP - raises a \#DB style interrupt via the common software
 * interrupt C implementation (IEMINT_INT1 marks the INT1 flavour).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
12333
12334
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix - sets IEM_OP_PRF_REPNZ (clearing any earlier REPZ)
 * and continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12352
12353
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix - sets IEM_OP_PRF_REPZ (clearing any earlier REPNZ)
 * and continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12371
12372
12373/**
12374 * @opcode 0xf4
12375 */
12376FNIEMOP_DEF(iemOp_hlt)
12377{
12378 IEMOP_MNEMONIC(hlt, "hlt");
12379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12380 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
12381}
12382
12383
12384/**
12385 * @opcode 0xf5
12386 */
12387FNIEMOP_DEF(iemOp_cmc)
12388{
12389 IEMOP_MNEMONIC(cmc, "cmc");
12390 IEM_MC_BEGIN(0, 0, 0, 0);
12391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12392 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12393 IEM_MC_ADVANCE_RIP_AND_FINISH();
12394 IEM_MC_END();
12395}
12396
12397
/**
 * Body for of 'inc/dec/not/neg Eb'.
 *
 * Shared decoder body for the byte-sized unary group instructions:
 * dispatches on register vs. memory operand, and for memory operands on
 * whether a LOCK prefix is present (selecting a_fnLockedU8 instead of
 * a_fnNormalU8).  Both memory paths map the byte read/write and commit
 * EFLAGS afterwards; only the called assembly helper differs.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            /* unlocked variant */ \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* locked variant - identical setup, but calls the locked helper */ \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
12458
12459
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles the register target and the unlocked memory target for all three
 * effective operand sizes.
 *
 * NOTE: This macro intentionally ends inside an unterminated 'else {' branch
 * (the locked memory case).  It MUST be followed by IEMOP_BODY_UNARY_Ev_LOCKED,
 * which supplies the locked workers and closes the braces.
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                /* The worker only sees the 32-bit ref; clear bits 63:32 explicitly. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
12583
/**
 * Continuation of IEMOP_BODY_UNARY_Ev: the LOCK prefixed memory target.
 *
 * This macro supplies the body of the 'else' branch that IEMOP_BODY_UNARY_Ev
 * leaves open, and closes the two outstanding braces.  It must always directly
 * follow an IEMOP_BODY_UNARY_Ev invocation.
 *
 * @param   a_fnLockedU16   Locked assembly worker for the 16-bit operand size.
 * @param   a_fnLockedU32   Locked assembly worker for the 32-bit operand size.
 * @param   a_fnLockedU64   Locked assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
} \
(void)0
12649
12650
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * 'test Eb,Ib' - AND the byte operand with an immediate, update flags, discard
 * the result.  The destination is therefore only mapped/read, never written.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 byte of immediate follows the ModR/M bytes */

        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Read-only mapping: test does not write the destination back. */
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12701
12702
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for byte-sized
 * mul/imul/div/idiv.
 *
 * AX is the implicit 16-bit destination register; the ModR/M r/m field only
 * selects the byte-sized source operand.  The assembly worker returns zero on
 * success and non-zero on overflow/divide-by-zero, in which case \#DE is
 * raised.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The byte-sized mul/imul/div/idiv assembly worker.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12753
12754
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for word/dword/qword
 * mul/imul/div/idiv.
 *
 * The implicit destination/source register pair is DX:AX, EDX:EAX or RDX:RAX
 * depending on the effective operand size; the ModR/M r/m field selects the
 * explicit operand.  The assembly worker returns zero on success and non-zero
 * on overflow/divide-by-zero, in which case \#DE is raised.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table with the 16/32/64-bit assembly workers.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Workers see 32-bit refs only; clear bits 63:32 of RAX/RDX here. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Workers see 32-bit refs only; clear bits 63:32 of RAX/RDX here. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12925
12926
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * 'not Eb' - one's complement of the byte operand; no EFLAGS are modified by
 * the operation itself (the workers still take the EFLAGS ref for uniformity).
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12936
12937
12938/**
12939 * @opmaps grp3_f6
12940 * @opcode /3
12941 */
12942FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12943{
12944 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12945 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12946}
12947
12948
/**
 * @opcode 0xf6
 *
 * Group 3 dispatcher for byte operands, selecting on the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm); /* /1 is an undocumented alias of /0 (test). */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12980
12981
/**
 * Opcode 0xf7 /0 - 'test Ev,Iv'.
 *
 * ANDs the word/dword/qword operand with an immediate, updates EFLAGS and
 * discards the result; the destination is only mapped/read, never written.
 * In 64-bit mode the immediate is a sign-extended 32-bit value.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow (sign extended) */

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13117
13118
/** Opcode 0xf7 /2 - 'not Ev'.  One's complement; EFLAGS are not affected
 *  by the operation. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    /* The two body macros form one statement; _Ev ends in an open else-branch
       that _Ev_LOCKED completes with the locked workers. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13126
13127
/** Opcode 0xf7 /3 - 'neg Ev'.  Two's complement negation, updating EFLAGS. */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    /* The two body macros form one statement; _Ev ends in an open else-branch
       that _Ev_LOCKED completes with the locked workers. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13135
13136
/**
 * @opcode 0xf7
 *
 * Group 3 dispatcher for word/dword/qword operands, selecting on the ModR/M
 * reg field.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm); /* /1 is an undocumented alias of /0 (test). */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13168
13169
/**
 * @opcode 0xf8
 *
 * 'clc' - clear the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13182
13183
/**
 * @opcode 0xf9
 *
 * 'stc' - set the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13196
13197
/**
 * @opcode 0xfa
 *
 * 'cli' - clear the interrupt flag.  Deferred to a C implementation since it
 * depends on CPL/IOPL/VME checks and may cause a VM exit.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
}
13207
13208
/**
 * @opcode 0xfb
 *
 * 'sti' - set the interrupt flag.  Deferred to a C implementation; sets up
 * the one-instruction interrupt-inhibit shadow and checks for pending IRQs
 * afterwards.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, iemCImpl_sti);
}
13216
13217
/**
 * @opcode 0xfc
 *
 * 'cld' - clear the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13230
13231
/**
 * @opcode 0xfd
 *
 * 'std' - set the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13244
13245
/**
 * @opmaps grp4
 * @opcode /0
 *
 * 'inc Eb' - increment the byte operand; CF is left unchanged.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13255
13256
/**
 * @opmaps grp4
 * @opcode /1
 *
 * 'dec Eb' - decrement the byte operand; CF is left unchanged.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13266
13267
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher (byte inc/dec); /2 through /7 are invalid and raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13284
/** Opcode 0xff /0 - 'inc Ev'.  Increment; CF is left unchanged. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    /* The two body macros form one statement; _Ev ends in an open else-branch
       that _Ev_LOCKED completes with the locked workers. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13292
13293
/** Opcode 0xff /1 - 'dec Ev'.  Decrement; CF is left unchanged. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    /* The two body macros form one statement; _Ev ends in an open else-branch
       that _Ev_LOCKED completes with the locked workers. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13301
13302
/**
 * Opcode 0xff /2 - 'call Ev' (near indirect call).
 *
 * Fetches the new IP/EIP/RIP from the register or memory operand and defers
 * the actual branch (stack push + RIP update) to the iemCImpl_call_NN worker.
 * In 64-bit mode the default operand size is 64-bit
 * (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE...), and Intel CPUs ignore the operand
 * size prefix here.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from memory. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13389
/**
 * Common body for grp5 /3 (callf Ep) and /5 (jmpf Ep): loads a far pointer
 * (16-bit selector + 16/32/64-bit offset) from memory and hands it to the
 * given C implementation function.
 *
 * @param   a_bRm       The ModR/M byte; register operands are invalid and
 *                      raise \#UD.
 * @param   a_fnCImpl   The C implementation worker taking (u16Sel, offSeg,
 *                      enmEffOpSize), e.g. iemCImpl_callf or iemCImpl_FarJmp.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; /* non-intel: force 32-bit far pointer */ \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t,  u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,  offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            /* The offset is stored first in memory, the selector follows it at disp +2. */ \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            /* Hint the recompiler to drop shadowed xSP copies; the C worker may modify SP. */ \
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t,  u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,  offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            /* 32-bit offset first, then the 16-bit selector at disp +4. */ \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            /* Only intel takes this path, see the REX.W handling above. */ \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t,  u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,  offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            /* 64-bit offset first, then the 16-bit selector at disp +8. */ \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_HINT_FLUSH_GUEST_SHADOW_GREG(X86_GREG_xSP); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13460
13461
/**
 * Opcode 0xff /3.
 *
 * Far indirect CALL: loads a selector:offset pair from memory (register
 * operands raise \#UD) and dispatches to iemCImpl_callf via the shared
 * IEMOP_BODY_GRP5_FAR_EP body.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
}
13471
13472
/**
 * Opcode 0xff /4.
 *
 * Near indirect JMP: the new IP/EIP/RIP is taken either from a general
 * purpose register (mod == 3) or from a memory operand, selected by the
 * effective operand size.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    /* 64-bit code defaults to 64-bit operand size; per the helper's name,
       Intel CPUs ignore the operand-size prefix for this instruction. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                /* Effective address must be decoded before DONE_DECODING (it consumes opcode bytes). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13559
13560
/**
 * Opcode 0xff /5.
 *
 * Far indirect JMP: loads a selector:offset pair from memory (register
 * operands raise \#UD) and dispatches to iemCImpl_FarJmp via the shared
 * IEMOP_BODY_GRP5_FAR_EP body.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
}
13570
13571
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev: pushes a word/dword/qword operand. Register operands are routed
 * to the common iemOpCommonPushGReg worker; memory operands are handled
 * inline below.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    /* Pushes default to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* IEM_MC_F_NOT_64BIT: there is no 32-bit push in 64-bit mode
               (the default-64-bit helper above never yields 32-bit there). */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13627
13628
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher: reads the ModR/M byte and routes to the handler
 * selected by the reg field (/0../7).
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: /* inc Ev */
            return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev,  bRm);
        case 1: /* dec Ev */
            return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev,  bRm);
        case 2: /* call Ev (near indirect) */
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3: /* callf Ep (far indirect) */
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4: /* jmp Ev (near indirect) */
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5: /* jmpf Ep (far indirect) */
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6: /* push Ev */
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7: /* /7 is undefined for group 5. */
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    /* The reg field is 3 bits wide, so all 8 values are covered above. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
13657
13658
13659
/**
 * The one byte opcode decoder function table, indexed by opcode byte.
 *
 * Declared extern at the top of this file; prefix bytes (segment overrides,
 * 66h/67h, lock/rep, escapes) are handled by their own entries here.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
13727
13728
13729/** @} */
13730
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette