VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 101504

Last change on this file since 101504 was 101484, checked in by vboxsync, 15 months ago

VMM/IEM: Basic register allocator sketches that incorporate simple skipping of guest register value loads. Sketched out variable and argument management. Start telling GDB about our jitted code to help with backtraces. ++ bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 490.9 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 101484 2023-10-18 01:32:17Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
62 *
63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
64 */
65#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
66 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
67 \
68 /* \
69 * If rm is denoting a register, no more instruction bytes. \
70 */ \
71 if (IEM_IS_MODRM_REG_MODE(bRm)) \
72 { \
73 IEM_MC_BEGIN(3, 0, 0, 0); \
74 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
75 IEM_MC_ARG(uint8_t, u8Src, 1); \
76 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
77 \
78 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
79 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
80 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
81 IEM_MC_REF_EFLAGS(pEFlags); \
82 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
83 \
84 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
85 IEM_MC_END(); \
86 } \
87 else \
88 { \
89 /* \
90 * We're accessing memory. \
91 * Note! We're putting the eflags on the stack here so we can commit them \
92 * after the memory. \
93 */ \
94 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
95 { \
96 IEM_MC_BEGIN(3, 3, 0, 0); \
97 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
98 IEM_MC_ARG(uint8_t, u8Src, 1); \
99 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
101 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
102 \
103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
104 IEMOP_HLP_DONE_DECODING(); \
105 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
106 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
107 IEM_MC_FETCH_EFLAGS(EFlags); \
108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
109 \
110 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
111 IEM_MC_COMMIT_EFLAGS(EFlags); \
112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
113 IEM_MC_END(); \
114 } \
115 else \
116 { \
117 (void)0
118
119/**
120 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
121 * operands.
122 *
123 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
124 */
125#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
127 \
128 /* \
129 * If rm is denoting a register, no more instruction bytes. \
130 */ \
131 if (IEM_IS_MODRM_REG_MODE(bRm)) \
132 { \
133 IEM_MC_BEGIN(3, 0, 0, 0); \
134 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
135 IEM_MC_ARG(uint8_t, u8Src, 1); \
136 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
137 \
138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
139 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
140 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
141 IEM_MC_REF_EFLAGS(pEFlags); \
142 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
143 \
144 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
145 IEM_MC_END(); \
146 } \
147 else \
148 { \
149 /* \
150 * We're accessing memory. \
151 * Note! We're putting the eflags on the stack here so we can commit them \
152 * after the memory. \
153 */ \
154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
155 { \
156 IEM_MC_BEGIN(3, 3, 0, 0); \
157 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
158 IEM_MC_ARG(uint8_t, u8Src, 1); \
159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
161 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
162 \
163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
164 IEMOP_HLP_DONE_DECODING(); \
165 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
166 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
167 IEM_MC_FETCH_EFLAGS(EFlags); \
168 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
169 \
170 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
171 IEM_MC_COMMIT_EFLAGS(EFlags); \
172 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
173 IEM_MC_END(); \
174 } \
175 else \
176 { \
177 (void)0
178
179#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
180 IEMOP_HLP_DONE_DECODING(); \
181 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
182 } \
183 } \
184 (void)0
185
186#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
187 IEM_MC_BEGIN(3, 3, 0, 0); \
188 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
189 IEM_MC_ARG(uint8_t, u8Src, 1); \
190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
192 IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
193 \
194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
195 IEMOP_HLP_DONE_DECODING(); \
196 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
197 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
198 IEM_MC_FETCH_EFLAGS(EFlags); \
199 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
200 \
201 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bMapInfoDst); \
202 IEM_MC_COMMIT_EFLAGS(EFlags); \
203 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
204 IEM_MC_END(); \
205 } \
206 } \
207 (void)0
208
209/**
210 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
211 * destination.
212 */
213#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
215 \
216 /* \
217 * If rm is denoting a register, no more instruction bytes. \
218 */ \
219 if (IEM_IS_MODRM_REG_MODE(bRm)) \
220 { \
221 IEM_MC_BEGIN(3, 0, 0, 0); \
222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
223 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
224 IEM_MC_ARG(uint8_t, u8Src, 1); \
225 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
226 \
227 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
228 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
229 IEM_MC_REF_EFLAGS(pEFlags); \
230 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
231 \
232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
233 IEM_MC_END(); \
234 } \
235 else \
236 { \
237 /* \
238 * We're accessing memory. \
239 */ \
240 IEM_MC_BEGIN(3, 1, 0, 0); \
241 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
242 IEM_MC_ARG(uint8_t, u8Src, 1); \
243 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
245 \
246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
248 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
249 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
250 IEM_MC_REF_EFLAGS(pEFlags); \
251 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
252 \
253 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
254 IEM_MC_END(); \
255 } \
256 (void)0
257
258
259/**
260 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
261 * memory/register as the destination.
262 */
263#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
265 \
266 /* \
267 * If rm is denoting a register, no more instruction bytes. \
268 */ \
269 if (IEM_IS_MODRM_REG_MODE(bRm)) \
270 { \
271 switch (pVCpu->iem.s.enmEffOpSize) \
272 { \
273 case IEMMODE_16BIT: \
274 IEM_MC_BEGIN(3, 0, 0, 0); \
275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
276 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
277 IEM_MC_ARG(uint16_t, u16Src, 1); \
278 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
279 \
280 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
281 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
282 IEM_MC_REF_EFLAGS(pEFlags); \
283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
284 \
285 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
286 IEM_MC_END(); \
287 break; \
288 \
289 case IEMMODE_32BIT: \
290 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
292 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
293 IEM_MC_ARG(uint32_t, u32Src, 1); \
294 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
295 \
296 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
297 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
298 IEM_MC_REF_EFLAGS(pEFlags); \
299 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
300 \
301 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
302 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
303 IEM_MC_END(); \
304 break; \
305 \
306 case IEMMODE_64BIT: \
307 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
309 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
310 IEM_MC_ARG(uint64_t, u64Src, 1); \
311 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
312 \
313 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
314 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
315 IEM_MC_REF_EFLAGS(pEFlags); \
316 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
317 \
318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
319 IEM_MC_END(); \
320 break; \
321 \
322 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
323 } \
324 } \
325 else \
326 { \
327 /* \
328 * We're accessing memory. \
329 * Note! We're putting the eflags on the stack here so we can commit them \
330 * after the memory. \
331 */ \
332 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
333 { \
334 switch (pVCpu->iem.s.enmEffOpSize) \
335 { \
336 case IEMMODE_16BIT: \
337 IEM_MC_BEGIN(3, 3, 0, 0); \
338 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
339 IEM_MC_ARG(uint16_t, u16Src, 1); \
340 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
342 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
343 \
344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
345 IEMOP_HLP_DONE_DECODING(); \
346 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
347 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
348 IEM_MC_FETCH_EFLAGS(EFlags); \
349 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
350 \
351 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
352 IEM_MC_COMMIT_EFLAGS(EFlags); \
353 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
354 IEM_MC_END(); \
355 break; \
356 \
357 case IEMMODE_32BIT: \
358 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
359 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
360 IEM_MC_ARG(uint32_t, u32Src, 1); \
361 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
363 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
364 \
365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
366 IEMOP_HLP_DONE_DECODING(); \
367 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
368 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
369 IEM_MC_FETCH_EFLAGS(EFlags); \
370 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
371 \
372 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
373 IEM_MC_COMMIT_EFLAGS(EFlags); \
374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
375 IEM_MC_END(); \
376 break; \
377 \
378 case IEMMODE_64BIT: \
379 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
380 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
381 IEM_MC_ARG(uint64_t, u64Src, 1); \
382 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
384 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
385 \
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
387 IEMOP_HLP_DONE_DECODING(); \
388 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
389 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
390 IEM_MC_FETCH_EFLAGS(EFlags); \
391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
392 \
393 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
394 IEM_MC_COMMIT_EFLAGS(EFlags); \
395 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
396 IEM_MC_END(); \
397 break; \
398 \
399 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
400 } \
401 } \
402 else \
403 { \
404 (void)0
405/* Separate macro to work around parsing issue in IEMAllInstPython.py */
406#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
407 switch (pVCpu->iem.s.enmEffOpSize) \
408 { \
409 case IEMMODE_16BIT: \
410 IEM_MC_BEGIN(3, 3, 0, 0); \
411 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
412 IEM_MC_ARG(uint16_t, u16Src, 1); \
413 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
415 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
416 \
417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
418 IEMOP_HLP_DONE_DECODING(); \
419 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
420 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
421 IEM_MC_FETCH_EFLAGS(EFlags); \
422 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
423 \
424 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
425 IEM_MC_COMMIT_EFLAGS(EFlags); \
426 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
427 IEM_MC_END(); \
428 break; \
429 \
430 case IEMMODE_32BIT: \
431 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
432 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
433 IEM_MC_ARG(uint32_t, u32Src, 1); \
434 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
436 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
437 \
438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
439 IEMOP_HLP_DONE_DECODING(); \
440 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
441 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
442 IEM_MC_FETCH_EFLAGS(EFlags); \
443 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
444 \
445 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo /* CMP,TEST */); \
446 IEM_MC_COMMIT_EFLAGS(EFlags); \
447 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
448 IEM_MC_END(); \
449 break; \
450 \
451 case IEMMODE_64BIT: \
452 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
453 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
454 IEM_MC_ARG(uint64_t, u64Src, 1); \
455 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
457 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
458 \
459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
460 IEMOP_HLP_DONE_DECODING(); \
461 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
462 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
463 IEM_MC_FETCH_EFLAGS(EFlags); \
464 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
465 \
466 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
467 IEM_MC_COMMIT_EFLAGS(EFlags); \
468 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
469 IEM_MC_END(); \
470 break; \
471 \
472 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
473 } \
474 } \
475 } \
476 (void)0
477
478/**
479 * Body for read-only word/dword/qword instructions like TEST and CMP with
480 * memory/register as the destination.
481 */
482#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
484 \
485 /* \
486 * If rm is denoting a register, no more instruction bytes. \
487 */ \
488 if (IEM_IS_MODRM_REG_MODE(bRm)) \
489 { \
490 switch (pVCpu->iem.s.enmEffOpSize) \
491 { \
492 case IEMMODE_16BIT: \
493 IEM_MC_BEGIN(3, 0, 0, 0); \
494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
495 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
496 IEM_MC_ARG(uint16_t, u16Src, 1); \
497 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
498 \
499 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
500 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
501 IEM_MC_REF_EFLAGS(pEFlags); \
502 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
503 \
504 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
505 IEM_MC_END(); \
506 break; \
507 \
508 case IEMMODE_32BIT: \
509 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
511 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
512 IEM_MC_ARG(uint32_t, u32Src, 1); \
513 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
514 \
515 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
516 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
517 IEM_MC_REF_EFLAGS(pEFlags); \
518 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
519 \
520 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
521 IEM_MC_END(); \
522 break; \
523 \
524 case IEMMODE_64BIT: \
525 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
527 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
528 IEM_MC_ARG(uint64_t, u64Src, 1); \
529 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
530 \
531 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
532 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
533 IEM_MC_REF_EFLAGS(pEFlags); \
534 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
535 \
536 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
537 IEM_MC_END(); \
538 break; \
539 \
540 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
541 } \
542 } \
543 else \
544 { \
545 /* \
546 * We're accessing memory. \
547 * Note! We're putting the eflags on the stack here so we can commit them \
548 * after the memory. \
549 */ \
550 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
551 { \
552 switch (pVCpu->iem.s.enmEffOpSize) \
553 { \
554 case IEMMODE_16BIT: \
555 IEM_MC_BEGIN(3, 3, 0, 0); \
556 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
557 IEM_MC_ARG(uint16_t, u16Src, 1); \
558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
560 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
561 \
562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
563 IEMOP_HLP_DONE_DECODING(); \
564 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
565 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
566 IEM_MC_FETCH_EFLAGS(EFlags); \
567 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
568 \
569 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
570 IEM_MC_COMMIT_EFLAGS(EFlags); \
571 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
572 IEM_MC_END(); \
573 break; \
574 \
575 case IEMMODE_32BIT: \
576 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
577 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
578 IEM_MC_ARG(uint32_t, u32Src, 1); \
579 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
581 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
582 \
583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
584 IEMOP_HLP_DONE_DECODING(); \
585 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
586 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
587 IEM_MC_FETCH_EFLAGS(EFlags); \
588 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
589 \
590 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
591 IEM_MC_COMMIT_EFLAGS(EFlags); \
592 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
593 IEM_MC_END(); \
594 break; \
595 \
596 case IEMMODE_64BIT: \
597 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
598 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
599 IEM_MC_ARG(uint64_t, u64Src, 1); \
600 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
602 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
603 \
604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
605 IEMOP_HLP_DONE_DECODING(); \
606 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
607 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
608 IEM_MC_FETCH_EFLAGS(EFlags); \
609 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
610 \
611 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
612 IEM_MC_COMMIT_EFLAGS(EFlags); \
613 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
614 IEM_MC_END(); \
615 break; \
616 \
617 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
618 } \
619 } \
620 else \
621 { \
622 IEMOP_HLP_DONE_DECODING(); \
623 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
624 } \
625 } \
626 (void)0
627
628
629/**
630 * Body for instructions like ADD, AND, OR, ++ with working on AL with
631 * a byte immediate.
632 */
633#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
634 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
635 \
636 IEM_MC_BEGIN(3, 0, 0, 0); \
637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
638 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
639 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
640 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
641 \
642 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
643 IEM_MC_REF_EFLAGS(pEFlags); \
644 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
645 \
646 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
647 IEM_MC_END()
648
649/**
650 * Body for instructions like ADD, AND, OR, ++ with working on
651 * AX/EAX/RAX with a word/dword immediate.
652 */
653#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
654 switch (pVCpu->iem.s.enmEffOpSize) \
655 { \
656 case IEMMODE_16BIT: \
657 { \
658 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
659 \
660 IEM_MC_BEGIN(3, 0, 0, 0); \
661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
662 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
663 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
664 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
665 \
666 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
667 IEM_MC_REF_EFLAGS(pEFlags); \
668 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
669 \
670 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
671 IEM_MC_END(); \
672 } \
673 \
674 case IEMMODE_32BIT: \
675 { \
676 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
677 \
678 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
680 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
681 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
682 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
683 \
684 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
685 IEM_MC_REF_EFLAGS(pEFlags); \
686 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
687 \
688 if (a_fModifiesDstReg) \
689 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
690 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
691 IEM_MC_END(); \
692 } \
693 \
694 case IEMMODE_64BIT: \
695 { \
696 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
697 \
698 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
700 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
701 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
702 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
703 \
704 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
705 IEM_MC_REF_EFLAGS(pEFlags); \
706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
707 \
708 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
709 IEM_MC_END(); \
710 } \
711 \
712 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
713 } \
714 (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
736FNIEMOP_DEF(iemOp_add_Eb_Gb)
737{
738 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
739 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_add_u8);
740 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
741}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
753FNIEMOP_DEF(iemOp_add_Ev_Gv)
754{
755 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
756 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
757 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
758}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
767FNIEMOP_DEF(iemOp_add_Gb_Eb)
768{
769 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
770 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
771}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
780FNIEMOP_DEF(iemOp_add_Gv_Ev)
781{
782 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
783 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
784}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
793FNIEMOP_DEF(iemOp_add_Al_Ib)
794{
795 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
796 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
797}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
809FNIEMOP_DEF(iemOp_add_eAX_Iz)
810{
811 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
812 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
813}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
819 */
820FNIEMOP_DEF(iemOp_push_ES)
821{
822 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
823 IEMOP_HLP_NO_64BIT();
824 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
825}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
831 */
832FNIEMOP_DEF(iemOp_pop_ES)
833{
834 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
835 IEMOP_HLP_NO_64BIT();
836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
837 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
838}
839
840
841/**
842 * @opcode 0x08
843 * @opgroup og_gen_arith_bin
844 * @opflmodify cf,pf,af,zf,sf,of
845 * @opflundef af
846 * @opflclear of,cf
847 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
848 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
849 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
850 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
851 */
852FNIEMOP_DEF(iemOp_or_Eb_Gb)
853{
854 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
855 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
856 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_or_u8);
857 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
858}
859
860
861/*
862 * @opcode 0x09
863 * @opgroup og_gen_arith_bin
864 * @opflmodify cf,pf,af,zf,sf,of
865 * @opflundef af
866 * @opflclear of,cf
867 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
868 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
869 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
870 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
871 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
872 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
873 */
874FNIEMOP_DEF(iemOp_or_Ev_Gv)
875{
876 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
877 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
878 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
879 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
880}
881
882
883/**
884 * @opcode 0x0a
885 * @opgroup og_gen_arith_bin
886 * @opflmodify cf,pf,af,zf,sf,of
887 * @opflundef af
888 * @opflclear of,cf
889 * @opcopytests iemOp_or_Eb_Gb
890 */
891FNIEMOP_DEF(iemOp_or_Gb_Eb)
892{
893 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
894 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
895 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
896}
897
898
899/**
900 * @opcode 0x0b
901 * @opgroup og_gen_arith_bin
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef af
904 * @opflclear of,cf
905 * @opcopytests iemOp_or_Ev_Gv
906 */
907FNIEMOP_DEF(iemOp_or_Gv_Ev)
908{
909 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
910 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
911 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
912}
913
914
915/**
916 * @opcode 0x0c
917 * @opgroup og_gen_arith_bin
918 * @opflmodify cf,pf,af,zf,sf,of
919 * @opflundef af
920 * @opflclear of,cf
921 * @opcopytests iemOp_or_Eb_Gb
922 */
923FNIEMOP_DEF(iemOp_or_Al_Ib)
924{
925 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
926 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
927 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
928}
929
930
931/**
932 * @opcode 0x0d
933 * @opgroup og_gen_arith_bin
934 * @opflmodify cf,pf,af,zf,sf,of
935 * @opflundef af
936 * @opflclear of,cf
937 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
938 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
939 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
940 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
941 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
942 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
943 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
944 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    /* rAX <- rAX | Iz; operand-size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
951
952
953/**
954 * @opcode 0x0e
955 * @opgroup og_stack_sreg
956 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* 0x0e is invalid in 64-bit mode. */
    /* Shared segment-register push helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
963
964
965/**
966 * @opcode 0x0f
967 * @opmnemonic EscTwo0f
968 * @openc two0f
969 * @opdisenum OP_2B_ESC
970 * @ophints harmless
971 * @opgroup og_escapes
972 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* On 286 and later 0x0f escapes into the two-byte opcode map: fetch the
       next byte and dispatch.  The map holds four entries per opcode byte,
       selected by the active (66/F3/F2) prefix index. */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1008
1009/**
1010 * @opcode 0x10
1011 * @opgroup og_gen_arith_bin
1012 * @opfltest cf
1013 * @opflmodify cf,pf,af,zf,sf,of
1014 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1015 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1016 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1017 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1019 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Common Eb,Gb body: plain worker plus a separate worker for the
       LOCK-prefixed variant. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1026
1027
1028/**
1029 * @opcode 0x11
1030 * @opgroup og_gen_arith_bin
1031 * @opfltest cf
1032 * @opflmodify cf,pf,af,zf,sf,of
1033 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1034 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1035 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1036 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1037 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1038 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Common Ev,Gv body: one worker per operand size, plus LOCKed variants. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1045
1046
1047/**
1048 * @opcode 0x12
1049 * @opgroup og_gen_arith_bin
1050 * @opfltest cf
1051 * @opflmodify cf,pf,af,zf,sf,of
1052 * @opcopytests iemOp_adc_Eb_Gb
1053 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination, so no LOCK variant here. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1059
1060
1061/**
1062 * @opcode 0x13
1063 * @opgroup og_gen_arith_bin
1064 * @opfltest cf
1065 * @opflmodify cf,pf,af,zf,sf,of
1066 * @opcopytests iemOp_adc_Ev_Gv
1067 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Common Gv,Ev body with one worker per operand size. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1073
1074
1075/**
1076 * @opcode 0x14
1077 * @opgroup og_gen_arith_bin
1078 * @opfltest cf
1079 * @opflmodify cf,pf,af,zf,sf,of
1080 * @opcopytests iemOp_adc_Eb_Gb
1081 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AL <- AL + imm8 + CF via the common AL,Ib body. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1087
1088
1089/**
1090 * @opcode 0x15
1091 * @opgroup og_gen_arith_bin
1092 * @opfltest cf
1093 * @opflmodify cf,pf,af,zf,sf,of
1094 * @opcopytests iemOp_adc_Ev_Gv
1095 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX destination; operand-size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1101
1102
1103/**
1104 * @opcode 0x16
1105 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT(); /* 0x16 is invalid in 64-bit mode. */
    /* Shared segment-register push helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1112
1113
1114/**
1115 * @opcode 0x17
1116 * @opgroup og_gen_arith_bin
1117 * @opfltest cf
1118 * @opflmodify cf,pf,af,zf,sf,of
1119 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* 0x17 is invalid in 64-bit mode. */
    /* Loading SS sets the interrupt-inhibit shadow for the next instruction
       and may change mode-related state, hence the two CIMPL flags. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1128
1129
1130/**
1131 * @opcode 0x18
1132 * @opgroup og_gen_arith_bin
1133 * @opfltest cf
1134 * @opflmodify cf,pf,af,zf,sf,of
1135 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Common Eb,Gb body: plain worker plus LOCK-prefixed worker. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1142
1143
1144/**
1145 * @opcode 0x19
1146 * @opgroup og_gen_arith_bin
1147 * @opfltest cf
1148 * @opflmodify cf,pf,af,zf,sf,of
1149 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Common Ev,Gv body: one worker per operand size, plus LOCKed variants. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1156
1157
1158/**
1159 * @opcode 0x1a
1160 * @opgroup og_gen_arith_bin
1161 * @opfltest cf
1162 * @opflmodify cf,pf,af,zf,sf,of
1163 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination, so no LOCK variant here. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1169
1170
1171/**
1172 * @opcode 0x1b
1173 * @opgroup og_gen_arith_bin
1174 * @opfltest cf
1175 * @opflmodify cf,pf,af,zf,sf,of
1176 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Common Gv,Ev body with one worker per operand size. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1182
1183
1184/**
1185 * @opcode 0x1c
1186 * @opgroup og_gen_arith_bin
1187 * @opfltest cf
1188 * @opflmodify cf,pf,af,zf,sf,of
1189 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AL <- AL - imm8 - CF via the common AL,Ib body. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1195
1196
1197/**
1198 * @opcode 0x1d
1199 * @opgroup og_gen_arith_bin
1200 * @opfltest cf
1201 * @opflmodify cf,pf,af,zf,sf,of
1202 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX destination; operand-size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1208
1209
1210/**
1211 * @opcode 0x1e
1212 * @opgroup og_stack_sreg
1213 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* 0x1e is invalid in 64-bit mode. */
    /* Shared segment-register push helper. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1220
1221
1222/**
1223 * @opcode 0x1f
1224 * @opgroup og_stack_sreg
1225 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* 0x1f is invalid in 64-bit mode. */
    /* Loading DS can change mode-related state (IEM_CIMPL_F_MODE); unlike
       POP SS there is no interrupt-inhibit shadow. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1233
1234
1235/**
1236 * @opcode 0x20
1237 * @opgroup og_gen_arith_bin
1238 * @opflmodify cf,pf,af,zf,sf,of
1239 * @opflundef af
1240 * @opflclear of,cf
1241 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* Common Eb,Gb body: plain worker plus LOCK-prefixed worker. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1249
1250
1251/**
1252 * @opcode 0x21
1253 * @opgroup og_gen_arith_bin
1254 * @opflmodify cf,pf,af,zf,sf,of
1255 * @opflundef af
1256 * @opflclear of,cf
1257 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* Common Ev,Gv body: one worker per operand size, plus LOCKed variants. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1265
1266
1267/**
1268 * @opcode 0x22
1269 * @opgroup og_gen_arith_bin
1270 * @opflmodify cf,pf,af,zf,sf,of
1271 * @opflundef af
1272 * @opflclear of,cf
1273 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* Register destination, so no LOCK variant here. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1280
1281
1282/**
1283 * @opcode 0x23
1284 * @opgroup og_gen_arith_bin
1285 * @opflmodify cf,pf,af,zf,sf,of
1286 * @opflundef af
1287 * @opflclear of,cf
1288 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* Common Gv,Ev body with one worker per operand size. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1295
1296
1297/**
1298 * @opcode 0x24
1299 * @opgroup og_gen_arith_bin
1300 * @opflmodify cf,pf,af,zf,sf,of
1301 * @opflundef af
1302 * @opflclear of,cf
1303 */
1304FNIEMOP_DEF(iemOp_and_Al_Ib)
1305{
1306 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1307 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1308 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1309}
1310
1311
1312/**
1313 * @opcode 0x25
1314 * @opgroup og_gen_arith_bin
1315 * @opflmodify cf,pf,af,zf,sf,of
1316 * @opflundef af
1317 * @opflclear of,cf
1318 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* rAX destination; operand-size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1325
1326
1327/**
1328 * @opcode 0x26
1329 * @opmnemonic SEG
1330 * @op1 ES
1331 * @opgroup og_prefix
1332 * @openc prefix
1333 * @opdisenum OP_SEG
1334 * @ophints harmless
1335 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it and go on decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1345
1346
1347/**
1348 * @opcode 0x27
1349 * @opfltest af,cf
1350 * @opflmodify cf,pf,af,zf,sf,of
1351 * @opflundef of
1352 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* 0x27 is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA. */
    /* Deferred to the C worker; only status flags (and AL) change. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1361
1362
1363/**
1364 * @opcode 0x28
1365 * @opgroup og_gen_arith_bin
1366 * @opflmodify cf,pf,af,zf,sf,of
1367 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Common Eb,Gb body: plain worker plus LOCK-prefixed worker. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1374
1375
1376/**
1377 * @opcode 0x29
1378 * @opgroup og_gen_arith_bin
1379 * @opflmodify cf,pf,af,zf,sf,of
1380 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Common Ev,Gv body: one worker per operand size, plus LOCKed variants. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1387
1388
1389/**
1390 * @opcode 0x2a
1391 * @opgroup og_gen_arith_bin
1392 * @opflmodify cf,pf,af,zf,sf,of
1393 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination, so no LOCK variant here. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1399
1400
1401/**
1402 * @opcode 0x2b
1403 * @opgroup og_gen_arith_bin
1404 * @opflmodify cf,pf,af,zf,sf,of
1405 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Common Gv,Ev body with one worker per operand size. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1411
1412
1413/**
1414 * @opcode 0x2c
1415 * @opgroup og_gen_arith_bin
1416 * @opflmodify cf,pf,af,zf,sf,of
1417 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AL <- AL - imm8 via the common AL,Ib body. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1423
1424
1425/**
1426 * @opcode 0x2d
1427 * @opgroup og_gen_arith_bin
1428 * @opflmodify cf,pf,af,zf,sf,of
1429 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX destination; operand-size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1435
1436
1437/**
1438 * @opcode 0x2e
1439 * @opmnemonic SEG
1440 * @op1 CS
1441 * @opgroup og_prefix
1442 * @openc prefix
1443 * @opdisenum OP_SEG
1444 * @ophints harmless
1445 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it and go on decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1455
1456
1457/**
1458 * @opcode 0x2f
1459 * @opfltest af,cf
1460 * @opflmodify cf,pf,af,zf,sf,of
1461 * @opflundef of
1462 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* 0x2f is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS. */
    /* Deferred to the C worker; only status flags (and AL) change. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1471
1472
1473/**
1474 * @opcode 0x30
1475 * @opgroup og_gen_arith_bin
1476 * @opflmodify cf,pf,af,zf,sf,of
1477 * @opflundef af
1478 * @opflclear of,cf
1479 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Common Eb,Gb body: plain worker plus LOCK-prefixed worker. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1487
1488
1489/**
1490 * @opcode 0x31
1491 * @opgroup og_gen_arith_bin
1492 * @opflmodify cf,pf,af,zf,sf,of
1493 * @opflundef af
1494 * @opflclear of,cf
1495 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Common Ev,Gv body: one worker per operand size, plus LOCKed variants. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1503
1504
1505/**
1506 * @opcode 0x32
1507 * @opgroup og_gen_arith_bin
1508 * @opflmodify cf,pf,af,zf,sf,of
1509 * @opflundef af
1510 * @opflclear of,cf
1511 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Register destination, so no LOCK variant here. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1518
1519
1520/**
1521 * @opcode 0x33
1522 * @opgroup og_gen_arith_bin
1523 * @opflmodify cf,pf,af,zf,sf,of
1524 * @opflundef af
1525 * @opflclear of,cf
1526 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Common Gv,Ev body with one worker per operand size. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1533
1534
1535/**
1536 * @opcode 0x34
1537 * @opgroup og_gen_arith_bin
1538 * @opflmodify cf,pf,af,zf,sf,of
1539 * @opflundef af
1540 * @opflclear of,cf
1541 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* AL <- AL ^ imm8 via the common AL,Ib body. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1548
1549
1550/**
1551 * @opcode 0x35
1552 * @opgroup og_gen_arith_bin
1553 * @opflmodify cf,pf,af,zf,sf,of
1554 * @opflundef af
1555 * @opflclear of,cf
1556 */
1557FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1558{
1559 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1560 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1561 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1562}
1563
1564
1565/**
1566 * @opcode 0x36
1567 * @opmnemonic SEG
1568 * @op1 SS
1569 * @opgroup og_prefix
1570 * @openc prefix
1571 * @opdisenum OP_SEG
1572 * @ophints harmless
1573 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it and go on decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1583
1584
1585/**
1586 * @opcode 0x37
1587 * @opfltest af,cf
1588 * @opflmodify cf,pf,af,zf,sf,of
1589 * @opflundef pf,zf,sf,of
1590 * @opgroup og_gen_arith_dec
1591 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1592 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1593 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1594 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1595 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1596 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1597 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1598 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1599 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1600 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1601 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1602 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1603 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1604 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1605 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1606 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1607 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1608 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1609 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1610 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1611 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1612 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1613 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1615 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1617 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1618 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1619 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1621 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1622 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT(); /* 0x37 is invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after AAA. */

    /* Deferred to the C worker; only status flags (and AL/AX) change. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1632
1633
1634/**
1635 * @opcode 0x38
1636 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* Read-only (_RO) body - CMP only updates EFLAGS - and the LOCK prefix
       is explicitly rejected. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1643
1644
1645/**
1646 * @opcode 0x39
1647 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* Read-only (_RO) body - CMP only updates EFLAGS. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1653
1654
1655/**
1656 * @opcode 0x3a
1657 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    /* Common Gb,Eb body; worker only updates EFLAGS. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1663
1664
1665/**
1666 * @opcode 0x3b
1667 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* Note the 0,0 trailing arguments here versus 1,0 for the destructive
       ops (or/adc/sbb/and/sub/xor) using the same body macro. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1673
1674
1675/**
1676 * @opcode 0x3c
1677 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* AL vs imm8 via the common AL,Ib body. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1683
1684
1685/**
1686 * @opcode 0x3d
1687 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Trailing 0 here versus 1 for the destructive rAX,Iz ops. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1693
1694
1695/**
1696 * @opcode 0x3e
1697 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it and go on decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1707
1708
1709/**
1710 * @opcode 0x3f
1711 * @opfltest af,cf
1712 * @opflmodify cf,pf,af,zf,sf,of
1713 * @opflundef pf,zf,sf,of
1714 * @opgroup og_gen_arith_dec
1715 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1716 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1717 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1718 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1719 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1720 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1721 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1722 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1723 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1724 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1725 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1726 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1727 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1728 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1729 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1730 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1731 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1732 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1733 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1734 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1735 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1736 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1744 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1745 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1746 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 */
1751FNIEMOP_DEF(iemOp_aas)
1752{
1753 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1754 IEMOP_HLP_NO_64BIT();
1755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1756 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1757
1758 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1759}
1760
1761
1762/**
1763 * Common 'inc/dec register' helper.
1764 *
1765 * Not for 64-bit code, only for what became the rex prefixes.
1766 */
1767#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1768 switch (pVCpu->iem.s.enmEffOpSize) \
1769 { \
1770 case IEMMODE_16BIT: \
1771 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1773 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1774 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1775 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1776 IEM_MC_REF_EFLAGS(pEFlags); \
1777 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1778 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1779 IEM_MC_END(); \
1780 break; \
1781 \
1782 case IEMMODE_32BIT: \
1783 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1785 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1786 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1787 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1788 IEM_MC_REF_EFLAGS(pEFlags); \
1789 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1790 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
1791 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1792 IEM_MC_END(); \
1793 break; \
1794 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1795 } \
1796 (void)0
1797
1798/**
1799 * @opcode 0x40
1800 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC of AX/EAX. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1818
1819
1820/**
1821 * @opcode 0x41
1822 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B extends the r/m/base register. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC of CX/ECX. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1841
1842
1843/**
1844 * @opcode 0x42
1845 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index register. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC of DX/EDX. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1864
1865
1866
1867/**
1868 * @opcode 0x43
1869 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B and REX.X both set. */
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC of BX/EBX. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1889
1890
1891/**
1892 * @opcode 0x44
1893 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends the ModR/M reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC of SP/ESP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1912
1913
1914/**
1915 * @opcode 0x45
1916 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R and REX.B both set. */
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC of BP/EBP. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1936
1937
1938/**
1939 * @opcode 0x46
1940 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R and REX.X both set. */
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC of SI/ESI. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1960
1961
1962/**
1963 * @opcode 0x47
1964 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R, REX.B and REX.X all set. */
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: plain INC of DI/EDI. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1985
1986
1987/**
1988 * @opcode 0x48
1989 */
1990FNIEMOP_DEF(iemOp_dec_eAX)
1991{
1992 /*
1993 * This is a REX prefix in 64-bit mode.
1994 */
1995 if (IEM_IS_64BIT_CODE(pVCpu))
1996 {
1997 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1998 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1999 iemRecalEffOpSize(pVCpu);
2000
2001 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2002 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2003 }
2004
2005 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2006 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2007}
2008
2009
2010/**
2011 * @opcode 0x49
2012 */
2013FNIEMOP_DEF(iemOp_dec_eCX)
2014{
2015 /*
2016 * This is a REX prefix in 64-bit mode.
2017 */
2018 if (IEM_IS_64BIT_CODE(pVCpu))
2019 {
2020 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2021 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2022 pVCpu->iem.s.uRexB = 1 << 3;
2023 iemRecalEffOpSize(pVCpu);
2024
2025 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2026 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2027 }
2028
2029 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2030 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2031}
2032
2033
2034/**
2035 * @opcode 0x4a
2036 */
2037FNIEMOP_DEF(iemOp_dec_eDX)
2038{
2039 /*
2040 * This is a REX prefix in 64-bit mode.
2041 */
2042 if (IEM_IS_64BIT_CODE(pVCpu))
2043 {
2044 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2045 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2046 pVCpu->iem.s.uRexIndex = 1 << 3;
2047 iemRecalEffOpSize(pVCpu);
2048
2049 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2050 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2051 }
2052
2053 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2054 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2055}
2056
2057
2058/**
2059 * @opcode 0x4b
2060 */
2061FNIEMOP_DEF(iemOp_dec_eBX)
2062{
2063 /*
2064 * This is a REX prefix in 64-bit mode.
2065 */
2066 if (IEM_IS_64BIT_CODE(pVCpu))
2067 {
2068 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2069 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2070 pVCpu->iem.s.uRexB = 1 << 3;
2071 pVCpu->iem.s.uRexIndex = 1 << 3;
2072 iemRecalEffOpSize(pVCpu);
2073
2074 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2075 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2076 }
2077
2078 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2079 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2080}
2081
2082
2083/**
2084 * @opcode 0x4c
2085 */
2086FNIEMOP_DEF(iemOp_dec_eSP)
2087{
2088 /*
2089 * This is a REX prefix in 64-bit mode.
2090 */
2091 if (IEM_IS_64BIT_CODE(pVCpu))
2092 {
2093 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2094 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2095 pVCpu->iem.s.uRexReg = 1 << 3;
2096 iemRecalEffOpSize(pVCpu);
2097
2098 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2099 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2100 }
2101
2102 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2103 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2104}
2105
2106
2107/**
2108 * @opcode 0x4d
2109 */
2110FNIEMOP_DEF(iemOp_dec_eBP)
2111{
2112 /*
2113 * This is a REX prefix in 64-bit mode.
2114 */
2115 if (IEM_IS_64BIT_CODE(pVCpu))
2116 {
2117 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2118 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2119 pVCpu->iem.s.uRexReg = 1 << 3;
2120 pVCpu->iem.s.uRexB = 1 << 3;
2121 iemRecalEffOpSize(pVCpu);
2122
2123 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2124 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2125 }
2126
2127 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2128 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2129}
2130
2131
2132/**
2133 * @opcode 0x4e
2134 */
2135FNIEMOP_DEF(iemOp_dec_eSI)
2136{
2137 /*
2138 * This is a REX prefix in 64-bit mode.
2139 */
2140 if (IEM_IS_64BIT_CODE(pVCpu))
2141 {
2142 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2143 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2144 pVCpu->iem.s.uRexReg = 1 << 3;
2145 pVCpu->iem.s.uRexIndex = 1 << 3;
2146 iemRecalEffOpSize(pVCpu);
2147
2148 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2149 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2150 }
2151
2152 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2153 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2154}
2155
2156
2157/**
2158 * @opcode 0x4f
2159 */
2160FNIEMOP_DEF(iemOp_dec_eDI)
2161{
2162 /*
2163 * This is a REX prefix in 64-bit mode.
2164 */
2165 if (IEM_IS_64BIT_CODE(pVCpu))
2166 {
2167 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2168 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2169 pVCpu->iem.s.uRexReg = 1 << 3;
2170 pVCpu->iem.s.uRexB = 1 << 3;
2171 pVCpu->iem.s.uRexIndex = 1 << 3;
2172 iemRecalEffOpSize(pVCpu);
2173
2174 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2175 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2176 }
2177
2178 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2179 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2180}
2181
2182
2183/**
2184 * Common 'push register' helper.
2185 */
2186FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2187{
2188 if (IEM_IS_64BIT_CODE(pVCpu))
2189 {
2190 iReg |= pVCpu->iem.s.uRexB;
2191 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2192 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2193 }
2194
2195 switch (pVCpu->iem.s.enmEffOpSize)
2196 {
2197 case IEMMODE_16BIT:
2198 IEM_MC_BEGIN(0, 1, 0, 0);
2199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2200 IEM_MC_LOCAL(uint16_t, u16Value);
2201 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2202 IEM_MC_PUSH_U16(u16Value);
2203 IEM_MC_ADVANCE_RIP_AND_FINISH();
2204 IEM_MC_END();
2205 break;
2206
2207 case IEMMODE_32BIT:
2208 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2210 IEM_MC_LOCAL(uint32_t, u32Value);
2211 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2212 IEM_MC_PUSH_U32(u32Value);
2213 IEM_MC_ADVANCE_RIP_AND_FINISH();
2214 IEM_MC_END();
2215 break;
2216
2217 case IEMMODE_64BIT:
2218 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2220 IEM_MC_LOCAL(uint64_t, u64Value);
2221 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2222 IEM_MC_PUSH_U64(u64Value);
2223 IEM_MC_ADVANCE_RIP_AND_FINISH();
2224 IEM_MC_END();
2225 break;
2226
2227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2228 }
2229}
2230
2231
2232/**
2233 * @opcode 0x50
2234 */
2235FNIEMOP_DEF(iemOp_push_eAX)
2236{
2237 IEMOP_MNEMONIC(push_rAX, "push rAX");
2238 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2239}
2240
2241
2242/**
2243 * @opcode 0x51
2244 */
2245FNIEMOP_DEF(iemOp_push_eCX)
2246{
2247 IEMOP_MNEMONIC(push_rCX, "push rCX");
2248 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2249}
2250
2251
2252/**
2253 * @opcode 0x52
2254 */
2255FNIEMOP_DEF(iemOp_push_eDX)
2256{
2257 IEMOP_MNEMONIC(push_rDX, "push rDX");
2258 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2259}
2260
2261
2262/**
2263 * @opcode 0x53
2264 */
2265FNIEMOP_DEF(iemOp_push_eBX)
2266{
2267 IEMOP_MNEMONIC(push_rBX, "push rBX");
2268 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2269}
2270
2271
2272/**
2273 * @opcode 0x54
2274 */
2275FNIEMOP_DEF(iemOp_push_eSP)
2276{
2277 IEMOP_MNEMONIC(push_rSP, "push rSP");
2278 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2279 {
2280 IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
2281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2282 IEM_MC_LOCAL(uint16_t, u16Value);
2283 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2284 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2285 IEM_MC_PUSH_U16(u16Value);
2286 IEM_MC_ADVANCE_RIP_AND_FINISH();
2287 IEM_MC_END();
2288 }
2289 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2290}
2291
2292
2293/**
2294 * @opcode 0x55
2295 */
2296FNIEMOP_DEF(iemOp_push_eBP)
2297{
2298 IEMOP_MNEMONIC(push_rBP, "push rBP");
2299 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2300}
2301
2302
2303/**
2304 * @opcode 0x56
2305 */
2306FNIEMOP_DEF(iemOp_push_eSI)
2307{
2308 IEMOP_MNEMONIC(push_rSI, "push rSI");
2309 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2310}
2311
2312
2313/**
2314 * @opcode 0x57
2315 */
2316FNIEMOP_DEF(iemOp_push_eDI)
2317{
2318 IEMOP_MNEMONIC(push_rDI, "push rDI");
2319 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2320}
2321
2322
2323/**
2324 * Common 'pop register' helper.
2325 */
2326FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2327{
2328 if (IEM_IS_64BIT_CODE(pVCpu))
2329 {
2330 iReg |= pVCpu->iem.s.uRexB;
2331 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2332 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2333 }
2334
2335 switch (pVCpu->iem.s.enmEffOpSize)
2336 {
2337 case IEMMODE_16BIT:
2338 IEM_MC_BEGIN(0, 1, 0, 0);
2339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2340 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2341 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2342 IEM_MC_POP_U16(pu16Dst);
2343 IEM_MC_ADVANCE_RIP_AND_FINISH();
2344 IEM_MC_END();
2345 break;
2346
2347 case IEMMODE_32BIT:
2348 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2350 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2351 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2352 IEM_MC_POP_U32(pu32Dst);
2353 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
2354 IEM_MC_ADVANCE_RIP_AND_FINISH();
2355 IEM_MC_END();
2356 break;
2357
2358 case IEMMODE_64BIT:
2359 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2361 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2362 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2363 IEM_MC_POP_U64(pu64Dst);
2364 IEM_MC_ADVANCE_RIP_AND_FINISH();
2365 IEM_MC_END();
2366 break;
2367
2368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2369 }
2370}
2371
2372
2373/**
2374 * @opcode 0x58
2375 */
2376FNIEMOP_DEF(iemOp_pop_eAX)
2377{
2378 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2379 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2380}
2381
2382
2383/**
2384 * @opcode 0x59
2385 */
2386FNIEMOP_DEF(iemOp_pop_eCX)
2387{
2388 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2389 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2390}
2391
2392
2393/**
2394 * @opcode 0x5a
2395 */
2396FNIEMOP_DEF(iemOp_pop_eDX)
2397{
2398 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2399 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2400}
2401
2402
2403/**
2404 * @opcode 0x5b
2405 */
2406FNIEMOP_DEF(iemOp_pop_eBX)
2407{
2408 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2409 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2410}
2411
2412
2413/**
2414 * @opcode 0x5c
2415 */
2416FNIEMOP_DEF(iemOp_pop_eSP)
2417{
2418 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2419 if (IEM_IS_64BIT_CODE(pVCpu))
2420 {
2421 if (pVCpu->iem.s.uRexB)
2422 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2423 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2424 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2425 }
2426
2427 /** @todo add testcase for this instruction. */
2428 switch (pVCpu->iem.s.enmEffOpSize)
2429 {
2430 case IEMMODE_16BIT:
2431 IEM_MC_BEGIN(0, 1, 0, 0);
2432 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2433 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2434 IEM_MC_LOCAL(uint16_t, u16Dst);
2435 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
2436 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2437 IEM_MC_ADVANCE_RIP_AND_FINISH();
2438 IEM_MC_END();
2439 break;
2440
2441 case IEMMODE_32BIT:
2442 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
2443 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2444 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2445 IEM_MC_LOCAL(uint32_t, u32Dst);
2446 IEM_MC_POP_U32(&u32Dst);
2447 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2448 IEM_MC_ADVANCE_RIP_AND_FINISH();
2449 IEM_MC_END();
2450 break;
2451
2452 case IEMMODE_64BIT:
2453 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2454 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2455 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2456 IEM_MC_LOCAL(uint64_t, u64Dst);
2457 IEM_MC_POP_U64(&u64Dst);
2458 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2459 IEM_MC_ADVANCE_RIP_AND_FINISH();
2460 IEM_MC_END();
2461 break;
2462
2463 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2464 }
2465}
2466
2467
2468/**
2469 * @opcode 0x5d
2470 */
2471FNIEMOP_DEF(iemOp_pop_eBP)
2472{
2473 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2474 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2475}
2476
2477
2478/**
2479 * @opcode 0x5e
2480 */
2481FNIEMOP_DEF(iemOp_pop_eSI)
2482{
2483 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2484 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2485}
2486
2487
2488/**
2489 * @opcode 0x5f
2490 */
2491FNIEMOP_DEF(iemOp_pop_eDI)
2492{
2493 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2494 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2495}
2496
2497
2498/**
2499 * @opcode 0x60
2500 */
2501FNIEMOP_DEF(iemOp_pusha)
2502{
2503 IEMOP_MNEMONIC(pusha, "pusha");
2504 IEMOP_HLP_MIN_186();
2505 IEMOP_HLP_NO_64BIT();
2506 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2507 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
2508 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2509 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
2510}
2511
2512
2513/**
2514 * @opcode 0x61
2515 */
2516FNIEMOP_DEF(iemOp_popa__mvex)
2517{
2518 if (!IEM_IS_64BIT_CODE(pVCpu))
2519 {
2520 IEMOP_MNEMONIC(popa, "popa");
2521 IEMOP_HLP_MIN_186();
2522 IEMOP_HLP_NO_64BIT();
2523 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2524 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
2525 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2526 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
2527 }
2528 IEMOP_MNEMONIC(mvex, "mvex");
2529 Log(("mvex prefix is not supported!\n"));
2530 IEMOP_RAISE_INVALID_OPCODE_RET();
2531}
2532
2533
2534/**
2535 * @opcode 0x62
2536 * @opmnemonic bound
2537 * @op1 Gv_RO
2538 * @op2 Ma
2539 * @opmincpu 80186
2540 * @ophints harmless x86_invalid_64
2541 * @optest op1=0 op2=0 ->
2542 * @optest op1=1 op2=0 -> value.xcpt=5
2543 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2544 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2545 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2546 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2547 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2548 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2549 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2550 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2551 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2555 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2564 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2565 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2567 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2568 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2569 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2570 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2571 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2572 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2573 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2577 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2584 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2585 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND.  The index register is checked
               against a pair of bounds read from memory; out of range raises
               \#BR via the C implementation. */
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at the effective address, upper bound 2 bytes after it. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at the effective address, upper bound 4 bytes after it. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX path: consume the remaining two prefix payload bytes; decoding of
       the prefixed instruction itself is not implemented. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2673
2674
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw: adjusts the RPL field of the destination selector to be at
 * least that of the source; the actual logic lives in iemAImpl_arpl.
 * 286+, protected mode only (no real or V86 mode).
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: reference it directly and let the assembly
           worker update it and EFLAGS in place. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, run the worker, then commit
           the mapping and the EFLAGS copy. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2723
2724
2725/**
2726 * @opcode 0x63
2727 *
2728 * @note This is a weird one. It works like a regular move instruction if
2729 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2730 * @todo This definitely needs a testcase to verify the odd cases. */
2731FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2732{
2733 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2734
2735 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2736 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2737
2738 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2739 {
2740 if (IEM_IS_MODRM_REG_MODE(bRm))
2741 {
2742 /*
2743 * Register to register.
2744 */
2745 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2747 IEM_MC_LOCAL(uint64_t, u64Value);
2748 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2749 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2750 IEM_MC_ADVANCE_RIP_AND_FINISH();
2751 IEM_MC_END();
2752 }
2753 else
2754 {
2755 /*
2756 * We're loading a register from memory.
2757 */
2758 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2759 IEM_MC_LOCAL(uint64_t, u64Value);
2760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2762 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2763 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2764 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2765 IEM_MC_ADVANCE_RIP_AND_FINISH();
2766 IEM_MC_END();
2767 }
2768 }
2769 else
2770 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2771}
2772
2773
2774/**
2775 * @opcode 0x64
2776 * @opmnemonic segfs
2777 * @opmincpu 80386
2778 * @opgroup og_prefixes
2779 */
2780FNIEMOP_DEF(iemOp_seg_FS)
2781{
2782 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2783 IEMOP_HLP_MIN_386();
2784
2785 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2786 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2787
2788 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2789 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2790}
2791
2792
2793/**
2794 * @opcode 0x65
2795 * @opmnemonic seggs
2796 * @opmincpu 80386
2797 * @opgroup og_prefixes
2798 */
2799FNIEMOP_DEF(iemOp_seg_GS)
2800{
2801 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2802 IEMOP_HLP_MIN_386();
2803
2804 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2805 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2806
2807 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2808 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2809}
2810
2811
2812/**
2813 * @opcode 0x66
2814 * @opmnemonic opsize
2815 * @openc prefix
2816 * @opmincpu 80386
2817 * @ophints harmless
2818 * @opgroup og_prefixes
2819 */
2820FNIEMOP_DEF(iemOp_op_size)
2821{
2822 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2823 IEMOP_HLP_MIN_386();
2824
2825 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2826 iemRecalEffOpSize(pVCpu);
2827
2828 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2829 when REPZ or REPNZ are present. */
2830 if (pVCpu->iem.s.idxPrefix == 0)
2831 pVCpu->iem.s.idxPrefix = 1;
2832
2833 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2834 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2835}
2836
2837
2838/**
2839 * @opcode 0x67
2840 * @opmnemonic addrsize
2841 * @openc prefix
2842 * @opmincpu 80386
2843 * @ophints harmless
2844 * @opgroup og_prefixes
2845 */
2846FNIEMOP_DEF(iemOp_addr_size)
2847{
2848 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2849 IEMOP_HLP_MIN_386();
2850
2851 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2852 switch (pVCpu->iem.s.enmDefAddrMode)
2853 {
2854 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2855 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2856 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2857 default: AssertFailed();
2858 }
2859
2860 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2861 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2862}
2863
2864
2865/**
2866 * @opcode 0x68
2867 */
2868FNIEMOP_DEF(iemOp_push_Iz)
2869{
2870 IEMOP_MNEMONIC(push_Iz, "push Iz");
2871 IEMOP_HLP_MIN_186();
2872 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2873 switch (pVCpu->iem.s.enmEffOpSize)
2874 {
2875 case IEMMODE_16BIT:
2876 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
2877 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2879 IEM_MC_PUSH_U16(u16Imm);
2880 IEM_MC_ADVANCE_RIP_AND_FINISH();
2881 IEM_MC_END();
2882 break;
2883
2884 case IEMMODE_32BIT:
2885 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2886 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2888 IEM_MC_PUSH_U32(u32Imm);
2889 IEM_MC_ADVANCE_RIP_AND_FINISH();
2890 IEM_MC_END();
2891 break;
2892
2893 case IEMMODE_64BIT:
2894 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
2895 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2897 IEM_MC_PUSH_U64(u64Imm);
2898 IEM_MC_ADVANCE_RIP_AND_FINISH();
2899 IEM_MC_END();
2900 break;
2901
2902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2903 }
2904}
2905
2906
2907/**
2908 * @opcode 0x69
2909 */
2910FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2911{
2912 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2913 IEMOP_HLP_MIN_186();
2914 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2915 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2916
2917 switch (pVCpu->iem.s.enmEffOpSize)
2918 {
2919 case IEMMODE_16BIT:
2920 {
2921 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2922 if (IEM_IS_MODRM_REG_MODE(bRm))
2923 {
2924 /* register operand */
2925 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2926 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2928 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2929 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2930 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2931 IEM_MC_LOCAL(uint16_t, u16Tmp);
2932
2933 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2934 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2935 IEM_MC_REF_EFLAGS(pEFlags);
2936 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2937 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2938
2939 IEM_MC_ADVANCE_RIP_AND_FINISH();
2940 IEM_MC_END();
2941 }
2942 else
2943 {
2944 /* memory operand */
2945 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2946 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2947 IEM_MC_ARG(uint16_t, u16Src, 1);
2948 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2949 IEM_MC_LOCAL(uint16_t, u16Tmp);
2950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2951
2952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2953 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2954 IEM_MC_ASSIGN(u16Src, u16Imm);
2955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2956 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2957 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2958 IEM_MC_REF_EFLAGS(pEFlags);
2959 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2960 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2961
2962 IEM_MC_ADVANCE_RIP_AND_FINISH();
2963 IEM_MC_END();
2964 }
2965 break;
2966 }
2967
2968 case IEMMODE_32BIT:
2969 {
2970 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2971 if (IEM_IS_MODRM_REG_MODE(bRm))
2972 {
2973 /* register operand */
2974 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2975 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
2976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2977 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2978 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2979 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2980 IEM_MC_LOCAL(uint32_t, u32Tmp);
2981
2982 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2983 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2984 IEM_MC_REF_EFLAGS(pEFlags);
2985 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2986 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2987
2988 IEM_MC_ADVANCE_RIP_AND_FINISH();
2989 IEM_MC_END();
2990 }
2991 else
2992 {
2993 /* memory operand */
2994 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
2995 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2996 IEM_MC_ARG(uint32_t, u32Src, 1);
2997 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2998 IEM_MC_LOCAL(uint32_t, u32Tmp);
2999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3000
3001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3002 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3003 IEM_MC_ASSIGN(u32Src, u32Imm);
3004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3005 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3006 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3007 IEM_MC_REF_EFLAGS(pEFlags);
3008 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3009 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3010
3011 IEM_MC_ADVANCE_RIP_AND_FINISH();
3012 IEM_MC_END();
3013 }
3014 break;
3015 }
3016
3017 case IEMMODE_64BIT:
3018 {
3019 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3020 if (IEM_IS_MODRM_REG_MODE(bRm))
3021 {
3022 /* register operand */
3023 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3024 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3026 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3027 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
3028 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3029 IEM_MC_LOCAL(uint64_t, u64Tmp);
3030
3031 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3032 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3033 IEM_MC_REF_EFLAGS(pEFlags);
3034 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3035 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3036
3037 IEM_MC_ADVANCE_RIP_AND_FINISH();
3038 IEM_MC_END();
3039 }
3040 else
3041 {
3042 /* memory operand */
3043 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3044 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3045 IEM_MC_ARG(uint64_t, u64Src, 1);
3046 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3047 IEM_MC_LOCAL(uint64_t, u64Tmp);
3048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3049
3050 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3051 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3052 IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
3053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3054 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3055 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3056 IEM_MC_REF_EFLAGS(pEFlags);
3057 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3058 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3059
3060 IEM_MC_ADVANCE_RIP_AND_FINISH();
3061 IEM_MC_END();
3062 }
3063 break;
3064 }
3065
3066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3067 }
3068}
3069
3070
3071/**
3072 * @opcode 0x6a
3073 */
3074FNIEMOP_DEF(iemOp_push_Ib)
3075{
3076 IEMOP_MNEMONIC(push_Ib, "push Ib");
3077 IEMOP_HLP_MIN_186();
3078 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3079 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3080
3081 switch (pVCpu->iem.s.enmEffOpSize)
3082 {
3083 case IEMMODE_16BIT:
3084 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
3085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3086 IEM_MC_PUSH_U16(i8Imm);
3087 IEM_MC_ADVANCE_RIP_AND_FINISH();
3088 IEM_MC_END();
3089 break;
3090 case IEMMODE_32BIT:
3091 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3093 IEM_MC_PUSH_U32(i8Imm);
3094 IEM_MC_ADVANCE_RIP_AND_FINISH();
3095 IEM_MC_END();
3096 break;
3097 case IEMMODE_64BIT:
3098 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
3099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3100 IEM_MC_PUSH_U64(i8Imm);
3101 IEM_MC_ADVANCE_RIP_AND_FINISH();
3102 IEM_MC_END();
3103 break;
3104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3105 }
3106}
3107
3108
3109/**
3110 * @opcode 0x6b
3111 */
3112FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3113{
3114 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3115 IEMOP_HLP_MIN_186();
3116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3117 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3118
3119 switch (pVCpu->iem.s.enmEffOpSize)
3120 {
3121 case IEMMODE_16BIT:
3122 {
3123 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3124 if (IEM_IS_MODRM_REG_MODE(bRm))
3125 {
3126 /* register operand */
3127 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3128 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3130 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3131 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3133 IEM_MC_LOCAL(uint16_t, u16Tmp);
3134
3135 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3136 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3137 IEM_MC_REF_EFLAGS(pEFlags);
3138 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3139 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3140
3141 IEM_MC_ADVANCE_RIP_AND_FINISH();
3142 IEM_MC_END();
3143 }
3144 else
3145 {
3146 /* memory operand */
3147 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3148 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3149 IEM_MC_ARG(uint16_t, u16Src, 1);
3150 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3151 IEM_MC_LOCAL(uint16_t, u16Tmp);
3152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3153
3154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3155 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3156 IEM_MC_ASSIGN(u16Src, u16Imm);
3157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3158 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3159 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3160 IEM_MC_REF_EFLAGS(pEFlags);
3161 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3162 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3163
3164 IEM_MC_ADVANCE_RIP_AND_FINISH();
3165 IEM_MC_END();
3166 }
3167 break;
3168 }
3169
3170 case IEMMODE_32BIT:
3171 {
3172 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3173 if (IEM_IS_MODRM_REG_MODE(bRm))
3174 {
3175 /* register operand */
3176 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3177 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3179 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3180 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3181 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3182 IEM_MC_LOCAL(uint32_t, u32Tmp);
3183
3184 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3185 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3186 IEM_MC_REF_EFLAGS(pEFlags);
3187 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3188 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3189
3190 IEM_MC_ADVANCE_RIP_AND_FINISH();
3191 IEM_MC_END();
3192 }
3193 else
3194 {
3195 /* memory operand */
3196 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3197 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3198 IEM_MC_ARG(uint32_t, u32Src, 1);
3199 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3200 IEM_MC_LOCAL(uint32_t, u32Tmp);
3201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3202
3203 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3204 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3205 IEM_MC_ASSIGN(u32Src, u32Imm);
3206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3207 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3208 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3209 IEM_MC_REF_EFLAGS(pEFlags);
3210 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3211 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3212
3213 IEM_MC_ADVANCE_RIP_AND_FINISH();
3214 IEM_MC_END();
3215 }
3216 break;
3217 }
3218
3219 case IEMMODE_64BIT:
3220 {
3221 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3222 if (IEM_IS_MODRM_REG_MODE(bRm))
3223 {
3224 /* register operand */
3225 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3226 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3227 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3228 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3229 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
3230 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3231 IEM_MC_LOCAL(uint64_t, u64Tmp);
3232
3233 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3234 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3235 IEM_MC_REF_EFLAGS(pEFlags);
3236 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3237 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3238
3239 IEM_MC_ADVANCE_RIP_AND_FINISH();
3240 IEM_MC_END();
3241 }
3242 else
3243 {
3244 /* memory operand */
3245 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3246 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3247 IEM_MC_ARG(uint64_t, u64Src, 1);
3248 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3249 IEM_MC_LOCAL(uint64_t, u64Tmp);
3250 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3251
3252 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3253 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
3254 IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
3255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3256 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3257 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3258 IEM_MC_REF_EFLAGS(pEFlags);
3259 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3260 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3261
3262 IEM_MC_ADVANCE_RIP_AND_FINISH();
3263 IEM_MC_END();
3264 }
3265 break;
3266 }
3267
3268 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3269 }
3270}
3271
3272
3273/**
3274 * @opcode 0x6c
3275 */
3276FNIEMOP_DEF(iemOp_insb_Yb_DX)
3277{
3278 IEMOP_HLP_MIN_186();
3279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3280 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3281 {
3282 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3283 switch (pVCpu->iem.s.enmEffAddrMode)
3284 {
3285 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3286 iemCImpl_rep_ins_op8_addr16, false);
3287 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3288 iemCImpl_rep_ins_op8_addr32, false);
3289 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3290 iemCImpl_rep_ins_op8_addr64, false);
3291 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3292 }
3293 }
3294 else
3295 {
3296 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3297 switch (pVCpu->iem.s.enmEffAddrMode)
3298 {
3299 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3300 iemCImpl_ins_op8_addr16, false);
3301 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3302 iemCImpl_ins_op8_addr32, false);
3303 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3304 iemCImpl_ins_op8_addr64, false);
3305 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3306 }
3307 }
3308}
3309
3310
3311/**
3312 * @opcode 0x6d
3313 */
3314FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3315{
3316 IEMOP_HLP_MIN_186();
3317 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3318 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3319 {
3320 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3321 switch (pVCpu->iem.s.enmEffOpSize)
3322 {
3323 case IEMMODE_16BIT:
3324 switch (pVCpu->iem.s.enmEffAddrMode)
3325 {
3326 case IEMMODE_16BIT:
3327 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3328 iemCImpl_rep_ins_op16_addr16, false);
3329 case IEMMODE_32BIT:
3330 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3331 iemCImpl_rep_ins_op16_addr32, false);
3332 case IEMMODE_64BIT:
3333 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3334 iemCImpl_rep_ins_op16_addr64, false);
3335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3336 }
3337 break;
3338 case IEMMODE_64BIT:
3339 case IEMMODE_32BIT:
3340 switch (pVCpu->iem.s.enmEffAddrMode)
3341 {
3342 case IEMMODE_16BIT:
3343 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3344 iemCImpl_rep_ins_op32_addr16, false);
3345 case IEMMODE_32BIT:
3346 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3347 iemCImpl_rep_ins_op32_addr32, false);
3348 case IEMMODE_64BIT:
3349 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3350 iemCImpl_rep_ins_op32_addr64, false);
3351 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3352 }
3353 break;
3354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3355 }
3356 }
3357 else
3358 {
3359 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3360 switch (pVCpu->iem.s.enmEffOpSize)
3361 {
3362 case IEMMODE_16BIT:
3363 switch (pVCpu->iem.s.enmEffAddrMode)
3364 {
3365 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3366 iemCImpl_ins_op16_addr16, false);
3367 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3368 iemCImpl_ins_op16_addr32, false);
3369 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3370 iemCImpl_ins_op16_addr64, false);
3371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3372 }
3373 break;
3374 case IEMMODE_64BIT:
3375 case IEMMODE_32BIT:
3376 switch (pVCpu->iem.s.enmEffAddrMode)
3377 {
3378 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3379 iemCImpl_ins_op32_addr16, false);
3380 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3381 iemCImpl_ins_op32_addr32, false);
3382 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3383 iemCImpl_ins_op32_addr64, false);
3384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3385 }
3386 break;
3387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3388 }
3389 }
3390}
3391
3392
3393/**
3394 * @opcode 0x6e
3395 */
3396FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3397{
3398 IEMOP_HLP_MIN_186();
3399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3400 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3401 {
3402 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3403 switch (pVCpu->iem.s.enmEffAddrMode)
3404 {
3405 case IEMMODE_16BIT:
3406 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3407 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3408 case IEMMODE_32BIT:
3409 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3410 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3411 case IEMMODE_64BIT:
3412 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3413 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3415 }
3416 }
3417 else
3418 {
3419 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3420 switch (pVCpu->iem.s.enmEffAddrMode)
3421 {
3422 case IEMMODE_16BIT:
3423 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3424 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3425 case IEMMODE_32BIT:
3426 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3427 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3428 case IEMMODE_64BIT:
3429 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3430 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3431 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3432 }
3433 }
3434}
3435
3436
3437/**
3438 * @opcode 0x6f
3439 */
3440FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3441{
3442 IEMOP_HLP_MIN_186();
3443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3444 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3445 {
3446 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3447 switch (pVCpu->iem.s.enmEffOpSize)
3448 {
3449 case IEMMODE_16BIT:
3450 switch (pVCpu->iem.s.enmEffAddrMode)
3451 {
3452 case IEMMODE_16BIT:
3453 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3454 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3455 case IEMMODE_32BIT:
3456 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3457 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3458 case IEMMODE_64BIT:
3459 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3460 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3462 }
3463 break;
3464 case IEMMODE_64BIT:
3465 case IEMMODE_32BIT:
3466 switch (pVCpu->iem.s.enmEffAddrMode)
3467 {
3468 case IEMMODE_16BIT:
3469 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3470 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3471 case IEMMODE_32BIT:
3472 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3473 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3474 case IEMMODE_64BIT:
3475 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3476 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3478 }
3479 break;
3480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3481 }
3482 }
3483 else
3484 {
3485 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3486 switch (pVCpu->iem.s.enmEffOpSize)
3487 {
3488 case IEMMODE_16BIT:
3489 switch (pVCpu->iem.s.enmEffAddrMode)
3490 {
3491 case IEMMODE_16BIT:
3492 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3493 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3494 case IEMMODE_32BIT:
3495 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3496 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3497 case IEMMODE_64BIT:
3498 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3499 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3501 }
3502 break;
3503 case IEMMODE_64BIT:
3504 case IEMMODE_32BIT:
3505 switch (pVCpu->iem.s.enmEffAddrMode)
3506 {
3507 case IEMMODE_16BIT:
3508 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3509 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3510 case IEMMODE_32BIT:
3511 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3512 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3513 case IEMMODE_64BIT:
3514 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3515 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3517 }
3518 break;
3519 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3520 }
3521 }
3522}
3523
3524
3525/**
3526 * @opcode 0x70
3527 */
3528FNIEMOP_DEF(iemOp_jo_Jb)
3529{
3530 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3531 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3532 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3533
3534 IEM_MC_BEGIN(0, 0, 0, 0);
3535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3536 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3537 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3538 } IEM_MC_ELSE() {
3539 IEM_MC_ADVANCE_RIP_AND_FINISH();
3540 } IEM_MC_ENDIF();
3541 IEM_MC_END();
3542}
3543
3544
3545/**
3546 * @opcode 0x71
3547 */
3548FNIEMOP_DEF(iemOp_jno_Jb)
3549{
3550 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3551 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3552 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3553
3554 IEM_MC_BEGIN(0, 0, 0, 0);
3555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3556 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3557 IEM_MC_ADVANCE_RIP_AND_FINISH();
3558 } IEM_MC_ELSE() {
3559 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3560 } IEM_MC_ENDIF();
3561 IEM_MC_END();
3562}
3563
3564/**
3565 * @opcode 0x72
3566 */
3567FNIEMOP_DEF(iemOp_jc_Jb)
3568{
3569 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3570 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3571 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3572
3573 IEM_MC_BEGIN(0, 0, 0, 0);
3574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3575 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3576 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3577 } IEM_MC_ELSE() {
3578 IEM_MC_ADVANCE_RIP_AND_FINISH();
3579 } IEM_MC_ENDIF();
3580 IEM_MC_END();
3581}
3582
3583
3584/**
3585 * @opcode 0x73
3586 */
3587FNIEMOP_DEF(iemOp_jnc_Jb)
3588{
3589 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3590 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3591 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3592
3593 IEM_MC_BEGIN(0, 0, 0, 0);
3594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3595 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3596 IEM_MC_ADVANCE_RIP_AND_FINISH();
3597 } IEM_MC_ELSE() {
3598 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3599 } IEM_MC_ENDIF();
3600 IEM_MC_END();
3601}
3602
3603
3604/**
3605 * @opcode 0x74
3606 */
3607FNIEMOP_DEF(iemOp_je_Jb)
3608{
3609 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3610 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3611 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3612
3613 IEM_MC_BEGIN(0, 0, 0, 0);
3614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3616 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3617 } IEM_MC_ELSE() {
3618 IEM_MC_ADVANCE_RIP_AND_FINISH();
3619 } IEM_MC_ENDIF();
3620 IEM_MC_END();
3621}
3622
3623
3624/**
3625 * @opcode 0x75
3626 */
3627FNIEMOP_DEF(iemOp_jne_Jb)
3628{
3629 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3630 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3631 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3632
3633 IEM_MC_BEGIN(0, 0, 0, 0);
3634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3635 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3636 IEM_MC_ADVANCE_RIP_AND_FINISH();
3637 } IEM_MC_ELSE() {
3638 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3639 } IEM_MC_ENDIF();
3640 IEM_MC_END();
3641}
3642
3643
3644/**
3645 * @opcode 0x76
3646 */
3647FNIEMOP_DEF(iemOp_jbe_Jb)
3648{
3649 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3650 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3651 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3652
3653 IEM_MC_BEGIN(0, 0, 0, 0);
3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3655 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3656 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3657 } IEM_MC_ELSE() {
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 } IEM_MC_ENDIF();
3660 IEM_MC_END();
3661}
3662
3663
3664/**
3665 * @opcode 0x77
3666 */
3667FNIEMOP_DEF(iemOp_jnbe_Jb)
3668{
3669 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3670 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3671 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3672
3673 IEM_MC_BEGIN(0, 0, 0, 0);
3674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3675 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3676 IEM_MC_ADVANCE_RIP_AND_FINISH();
3677 } IEM_MC_ELSE() {
3678 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3679 } IEM_MC_ENDIF();
3680 IEM_MC_END();
3681}
3682
3683
3684/**
3685 * @opcode 0x78
3686 */
3687FNIEMOP_DEF(iemOp_js_Jb)
3688{
3689 IEMOP_MNEMONIC(js_Jb, "js Jb");
3690 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3691 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3692
3693 IEM_MC_BEGIN(0, 0, 0, 0);
3694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3695 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3696 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3697 } IEM_MC_ELSE() {
3698 IEM_MC_ADVANCE_RIP_AND_FINISH();
3699 } IEM_MC_ENDIF();
3700 IEM_MC_END();
3701}
3702
3703
3704/**
3705 * @opcode 0x79
3706 */
3707FNIEMOP_DEF(iemOp_jns_Jb)
3708{
3709 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3710 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3711 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3712
3713 IEM_MC_BEGIN(0, 0, 0, 0);
3714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3715 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3716 IEM_MC_ADVANCE_RIP_AND_FINISH();
3717 } IEM_MC_ELSE() {
3718 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3719 } IEM_MC_ENDIF();
3720 IEM_MC_END();
3721}
3722
3723
3724/**
3725 * @opcode 0x7a
3726 */
3727FNIEMOP_DEF(iemOp_jp_Jb)
3728{
3729 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3730 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3731 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3732
3733 IEM_MC_BEGIN(0, 0, 0, 0);
3734 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3735 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3736 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3737 } IEM_MC_ELSE() {
3738 IEM_MC_ADVANCE_RIP_AND_FINISH();
3739 } IEM_MC_ENDIF();
3740 IEM_MC_END();
3741}
3742
3743
3744/**
3745 * @opcode 0x7b
3746 */
3747FNIEMOP_DEF(iemOp_jnp_Jb)
3748{
3749 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3750 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3751 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3752
3753 IEM_MC_BEGIN(0, 0, 0, 0);
3754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3755 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3756 IEM_MC_ADVANCE_RIP_AND_FINISH();
3757 } IEM_MC_ELSE() {
3758 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3759 } IEM_MC_ENDIF();
3760 IEM_MC_END();
3761}
3762
3763
3764/**
3765 * @opcode 0x7c
3766 */
3767FNIEMOP_DEF(iemOp_jl_Jb)
3768{
3769 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3770 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3771 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3772
3773 IEM_MC_BEGIN(0, 0, 0, 0);
3774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3775 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3776 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3777 } IEM_MC_ELSE() {
3778 IEM_MC_ADVANCE_RIP_AND_FINISH();
3779 } IEM_MC_ENDIF();
3780 IEM_MC_END();
3781}
3782
3783
3784/**
3785 * @opcode 0x7d
3786 */
3787FNIEMOP_DEF(iemOp_jnl_Jb)
3788{
3789 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3790 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3791 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3792
3793 IEM_MC_BEGIN(0, 0, 0, 0);
3794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3795 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3796 IEM_MC_ADVANCE_RIP_AND_FINISH();
3797 } IEM_MC_ELSE() {
3798 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3799 } IEM_MC_ENDIF();
3800 IEM_MC_END();
3801}
3802
3803
3804/**
3805 * @opcode 0x7e
3806 */
3807FNIEMOP_DEF(iemOp_jle_Jb)
3808{
3809 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3810 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3811 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3812
3813 IEM_MC_BEGIN(0, 0, 0, 0);
3814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3815 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3816 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3817 } IEM_MC_ELSE() {
3818 IEM_MC_ADVANCE_RIP_AND_FINISH();
3819 } IEM_MC_ENDIF();
3820 IEM_MC_END();
3821}
3822
3823
3824/**
3825 * @opcode 0x7f
3826 */
3827FNIEMOP_DEF(iemOp_jnle_Jb)
3828{
3829 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3830 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3831 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3832
3833 IEM_MC_BEGIN(0, 0, 0, 0);
3834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3835 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3836 IEM_MC_ADVANCE_RIP_AND_FINISH();
3837 } IEM_MC_ELSE() {
3838 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3839 } IEM_MC_ENDIF();
3840 IEM_MC_END();
3841}
3842
3843
3844/**
3845 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3846 * iemOp_Grp1_Eb_Ib_80.
3847 */
3848#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3849 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3850 { \
3851 /* register target */ \
3852 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3853 IEM_MC_BEGIN(3, 0, 0, 0); \
3854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3855 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3856 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3857 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3858 \
3859 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3860 IEM_MC_REF_EFLAGS(pEFlags); \
3861 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3862 \
3863 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3864 IEM_MC_END(); \
3865 } \
3866 else \
3867 { \
3868 /* memory target */ \
3869 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3870 { \
3871 IEM_MC_BEGIN(3, 3, 0, 0); \
3872 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3873 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3874 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3875 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3876 \
3877 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3878 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3879 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3880 IEMOP_HLP_DONE_DECODING(); \
3881 \
3882 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3883 IEM_MC_FETCH_EFLAGS(EFlags); \
3884 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3885 \
3886 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3887 IEM_MC_COMMIT_EFLAGS(EFlags); \
3888 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3889 IEM_MC_END(); \
3890 } \
3891 else \
3892 { \
3893 (void)0
3894
/**
 * Second half of IEMOP_BODY_BINARY_Eb_Ib_RW: emits the LOCK prefixed
 * memory-target path using @a a_fnLockedU8 (the atomic 8-bit worker) and
 * closes the braces the RW macro left open.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3918
/**
 * Like IEMOP_BODY_BINARY_Eb_Ib_RW but for operations that only read the
 * destination (CMP): the memory operand is mapped read-only and unmapped
 * without committing.  Ends inside an open 'else' branch for the (invalid)
 * LOCK prefixed case; must be completed by IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK().
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3965
/**
 * Second half of IEMOP_BODY_BINARY_Eb_Ib_RO: raises \#UD for the LOCK
 * prefixed case (CMP cannot be locked) and closes the open braces.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3972
3973
3974
3975/**
3976 * @opmaps grp1_80,grp1_83
3977 * @opcode /0
3978 */
3979FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
3980{
3981 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
3982 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
3983 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
3984}
3985
3986
3987/**
3988 * @opmaps grp1_80,grp1_83
3989 * @opcode /1
3990 */
3991FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
3992{
3993 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
3994 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
3995 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
3996}
3997
3998
3999/**
4000 * @opmaps grp1_80,grp1_83
4001 * @opcode /2
4002 */
4003FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4004{
4005 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4006 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4007 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4008}
4009
4010
4011/**
4012 * @opmaps grp1_80,grp1_83
4013 * @opcode /3
4014 */
4015FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4016{
4017 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4018 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4019 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4020}
4021
4022
4023/**
4024 * @opmaps grp1_80,grp1_83
4025 * @opcode /4
4026 */
4027FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4028{
4029 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4030 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4031 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4032}
4033
4034
4035/**
4036 * @opmaps grp1_80,grp1_83
4037 * @opcode /5
4038 */
4039FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4040{
4041 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4042 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4043 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4044}
4045
4046
4047/**
4048 * @opmaps grp1_80,grp1_83
4049 * @opcode /6
4050 */
4051FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4052{
4053 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4054 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4055 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4056}
4057
4058
4059/**
4060 * @opmaps grp1_80,grp1_83
4061 * @opcode /7
4062 */
4063FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4064{
4065 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4066 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4067 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4068}
4069
4070
4071/**
4072 * @opcode 0x80
4073 */
4074FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4075{
4076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4077 switch (IEM_GET_MODRM_REG_8(bRm))
4078 {
4079 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4080 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4081 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4082 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4083 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4084 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4085 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4086 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4087 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4088 }
4089}
4090
4091
4092/**
4093 * Body for a group 1 binary operator.
4094 */
4095#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4096 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4097 { \
4098 /* register target */ \
4099 switch (pVCpu->iem.s.enmEffOpSize) \
4100 { \
4101 case IEMMODE_16BIT: \
4102 { \
4103 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4104 IEM_MC_BEGIN(3, 0, 0, 0); \
4105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4106 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4107 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4108 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4109 \
4110 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4111 IEM_MC_REF_EFLAGS(pEFlags); \
4112 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4113 \
4114 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4115 IEM_MC_END(); \
4116 break; \
4117 } \
4118 \
4119 case IEMMODE_32BIT: \
4120 { \
4121 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4122 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4124 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4125 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4126 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4127 \
4128 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4129 IEM_MC_REF_EFLAGS(pEFlags); \
4130 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4131 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4132 \
4133 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4134 IEM_MC_END(); \
4135 break; \
4136 } \
4137 \
4138 case IEMMODE_64BIT: \
4139 { \
4140 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4141 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4143 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4144 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4145 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4146 \
4147 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4148 IEM_MC_REF_EFLAGS(pEFlags); \
4149 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4150 \
4151 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4152 IEM_MC_END(); \
4153 break; \
4154 } \
4155 \
4156 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4157 } \
4158 } \
4159 else \
4160 { \
4161 /* memory target */ \
4162 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4163 { \
4164 switch (pVCpu->iem.s.enmEffOpSize) \
4165 { \
4166 case IEMMODE_16BIT: \
4167 { \
4168 IEM_MC_BEGIN(3, 3, 0, 0); \
4169 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4170 IEM_MC_ARG(uint16_t, u16Src, 1); \
4171 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4172 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4173 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4174 \
4175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4176 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4177 IEM_MC_ASSIGN(u16Src, u16Imm); \
4178 IEMOP_HLP_DONE_DECODING(); \
4179 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4180 IEM_MC_FETCH_EFLAGS(EFlags); \
4181 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4182 \
4183 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4184 IEM_MC_COMMIT_EFLAGS(EFlags); \
4185 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4186 IEM_MC_END(); \
4187 break; \
4188 } \
4189 \
4190 case IEMMODE_32BIT: \
4191 { \
4192 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4193 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4194 IEM_MC_ARG(uint32_t, u32Src, 1); \
4195 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4196 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4197 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4198 \
4199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4200 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4201 IEM_MC_ASSIGN(u32Src, u32Imm); \
4202 IEMOP_HLP_DONE_DECODING(); \
4203 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4204 IEM_MC_FETCH_EFLAGS(EFlags); \
4205 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4206 \
4207 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4208 IEM_MC_COMMIT_EFLAGS(EFlags); \
4209 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4210 IEM_MC_END(); \
4211 break; \
4212 } \
4213 \
4214 case IEMMODE_64BIT: \
4215 { \
4216 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4217 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4218 IEM_MC_ARG(uint64_t, u64Src, 1); \
4219 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4220 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4221 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4222 \
4223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4224 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4225 IEMOP_HLP_DONE_DECODING(); \
4226 IEM_MC_ASSIGN(u64Src, u64Imm); \
4227 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4228 IEM_MC_FETCH_EFLAGS(EFlags); \
4229 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4230 \
4231 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4232 IEM_MC_COMMIT_EFLAGS(EFlags); \
4233 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4234 IEM_MC_END(); \
4235 break; \
4236 } \
4237 \
4238 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4239 } \
4240 } \
4241 else \
4242 { \
4243 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * LOCK-prefixed memory-destination tail for IEMOP_BODY_BINARY_Ev_Iz_RW();
 * also closes the braces that macro left open.  Same decode/map/commit
 * sequence, but the atomic (locked) workers are invoked instead.
 *
 * @param a_fnLockedU16 Locked 16-bit worker.
 * @param a_fnLockedU32 Locked 32-bit worker.
 * @param a_fnLockedU64 Locked 64-bit worker.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate bytes after modrm */ \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes after modrm */ \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* imm32 sign-extended */ \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4325
/* read-only version */
/**
 * Body for group 1 Ev,Iz with a read-only destination (CMP): the destination
 * is only mapped/fetched, never written back, and a LOCK prefix raises \#UD.
 * Unlike the _RW variant this macro is self-contained (closes all braces).
 *
 * NOTE(review): in the memory 64-bit case IEM_MC_ASSIGN comes after
 * IEMOP_HLP_DONE_DECODING(), while the 16/32-bit cases assign first; the
 * ordering is preserved here as-is.
 *
 * @param a_fnNormalU16 16-bit worker (flags only). @param a_fnNormalU32 32-bit. @param a_fnNormalU64 64-bit.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* no high-dword clearing: destination is not written by CMP */ \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32, sign-extended */ \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate bytes after modrm */ \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes after modrm */ \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* imm32 sign-extended */ \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK on a read-only destination is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4479
4480
4481/**
4482 * @opmaps grp1_81
4483 * @opcode /0
4484 */
4485FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4486{
4487 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4488 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4489 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4490}
4491
4492
4493/**
4494 * @opmaps grp1_81
4495 * @opcode /1
4496 */
4497FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4498{
4499 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4500 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4501 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4502}
4503
4504
4505/**
4506 * @opmaps grp1_81
4507 * @opcode /2
4508 */
4509FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4510{
4511 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4512 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4513 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4514}
4515
4516
4517/**
4518 * @opmaps grp1_81
4519 * @opcode /3
4520 */
4521FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4522{
4523 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4524 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4525 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4526}
4527
4528
4529/**
4530 * @opmaps grp1_81
4531 * @opcode /4
4532 */
4533FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4534{
4535 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4536 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4537 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4538}
4539
4540
4541/**
4542 * @opmaps grp1_81
4543 * @opcode /5
4544 */
4545FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4546{
4547 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4548 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4549 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4550}
4551
4552
4553/**
4554 * @opmaps grp1_81
4555 * @opcode /6
4556 */
4557FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4558{
4559 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4560 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4561 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4562}
4563
4564
4565/**
4566 * @opmaps grp1_81
4567 * @opcode /7
4568 */
4569FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4570{
4571 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4572 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4573}
4574
4575
4576/**
4577 * @opcode 0x81
4578 */
4579FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4580{
4581 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4582 switch (IEM_GET_MODRM_REG_8(bRm))
4583 {
4584 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4585 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4586 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4587 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4588 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4589 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4590 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4591 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4593 }
4594}
4595
4596
4597/**
4598 * @opcode 0x82
4599 * @opmnemonic grp1_82
4600 * @opgroup og_groups
4601 */
4602FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4603{
4604 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4605 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4606}
4607
4608
4609/**
4610 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4611 * iemOp_Grp1_Ev_Ib.
4612 */
4613#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4614 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4615 { \
4616 /* \
4617 * Register target \
4618 */ \
4619 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4620 switch (pVCpu->iem.s.enmEffOpSize) \
4621 { \
4622 case IEMMODE_16BIT: \
4623 IEM_MC_BEGIN(3, 0, 0, 0); \
4624 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4625 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4626 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4627 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4628 \
4629 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4630 IEM_MC_REF_EFLAGS(pEFlags); \
4631 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4632 \
4633 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4634 IEM_MC_END(); \
4635 break; \
4636 \
4637 case IEMMODE_32BIT: \
4638 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4640 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4641 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4642 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4643 \
4644 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4645 IEM_MC_REF_EFLAGS(pEFlags); \
4646 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4647 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4648 \
4649 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4650 IEM_MC_END(); \
4651 break; \
4652 \
4653 case IEMMODE_64BIT: \
4654 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4656 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4657 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4658 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4659 \
4660 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4661 IEM_MC_REF_EFLAGS(pEFlags); \
4662 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4663 \
4664 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4665 IEM_MC_END(); \
4666 break; \
4667 \
4668 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4669 } \
4670 } \
4671 else \
4672 { \
4673 /* \
4674 * Memory target. \
4675 */ \
4676 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4677 { \
4678 switch (pVCpu->iem.s.enmEffOpSize) \
4679 { \
4680 case IEMMODE_16BIT: \
4681 IEM_MC_BEGIN(3, 3, 0, 0); \
4682 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4683 IEM_MC_ARG(uint16_t, u16Src, 1); \
4684 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4685 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4686 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4687 \
4688 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4689 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4690 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4691 IEMOP_HLP_DONE_DECODING(); \
4692 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4693 IEM_MC_FETCH_EFLAGS(EFlags); \
4694 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4695 \
4696 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4697 IEM_MC_COMMIT_EFLAGS(EFlags); \
4698 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4699 IEM_MC_END(); \
4700 break; \
4701 \
4702 case IEMMODE_32BIT: \
4703 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4704 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4705 IEM_MC_ARG(uint32_t, u32Src, 1); \
4706 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4708 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4709 \
4710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4711 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4712 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4713 IEMOP_HLP_DONE_DECODING(); \
4714 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4715 IEM_MC_FETCH_EFLAGS(EFlags); \
4716 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4717 \
4718 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4719 IEM_MC_COMMIT_EFLAGS(EFlags); \
4720 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4721 IEM_MC_END(); \
4722 break; \
4723 \
4724 case IEMMODE_64BIT: \
4725 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4726 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4727 IEM_MC_ARG(uint64_t, u64Src, 1); \
4728 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4730 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4731 \
4732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4733 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4734 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4735 IEMOP_HLP_DONE_DECODING(); \
4736 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4737 IEM_MC_FETCH_EFLAGS(EFlags); \
4738 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4739 \
4740 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4741 IEM_MC_COMMIT_EFLAGS(EFlags); \
4742 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4743 IEM_MC_END(); \
4744 break; \
4745 \
4746 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4747 } \
4748 } \
4749 else \
4750 { \
4751 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed memory-destination tail for IEMOP_BODY_BINARY_Ev_Ib_RW();
 * also closes the braces that macro left open.  Same decode/map/commit
 * sequence, but the atomic (locked) workers are invoked instead.
 *
 * @param a_fnLockedU16 Locked 16-bit worker.
 * @param a_fnLockedU32 Locked 32-bit worker.
 * @param a_fnLockedU64 Locked 64-bit worker.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte after modrm */ \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extended Ib */ \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4827
/* read-only variant */
/**
 * Body for group 1 Ev,Ib with a read-only destination (CMP): the destination
 * is only mapped/fetched, never written back, and a LOCK prefix raises \#UD.
 * Self-contained (closes all braces), unlike the _RW/_LOCKED pair.
 *
 * @param a_fnNormalU16 16-bit worker (flags only). @param a_fnNormalU32 32-bit. @param a_fnNormalU64 64-bit.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); /* sign-extended Ib */ \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* no high-dword clearing: destination is not written by CMP */ \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte after modrm */ \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); /* sign-extended Ib */ \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK on a read-only destination is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4971
4972/**
4973 * @opmaps grp1_83
4974 * @opcode /0
4975 */
4976FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
4977{
4978 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
4979 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4980 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4981}
4982
4983
4984/**
4985 * @opmaps grp1_83
4986 * @opcode /1
4987 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    /* OR Ev,Ib (0x83 /1): ORs a sign-extended byte immediate into a 16/32/64-bit
       destination; unlocked and LOCK-prefixed memory variants. */
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4994
4995
4996/**
4997 * @opmaps grp1_83
4998 * @opcode /2
4999 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    /* ADC Ev,Ib (0x83 /2): add-with-carry of a sign-extended byte immediate;
       unlocked and LOCK-prefixed memory variants. */
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5006
5007
5008/**
5009 * @opmaps grp1_83
5010 * @opcode /3
5011 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    /* SBB Ev,Ib (0x83 /3): subtract-with-borrow of a sign-extended byte
       immediate; unlocked and LOCK-prefixed memory variants. */
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5018
5019
5020/**
5021 * @opmaps grp1_83
5022 * @opcode /4
5023 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    /* AND Ev,Ib (0x83 /4): ANDs a sign-extended byte immediate into a
       16/32/64-bit destination; unlocked and LOCK-prefixed memory variants. */
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5030
5031
5032/**
5033 * @opmaps grp1_83
5034 * @opcode /5
5035 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    /* SUB Ev,Ib (0x83 /5): subtracts a sign-extended byte immediate from a
       16/32/64-bit destination; unlocked and LOCK-prefixed memory variants. */
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5042
5043
5044/**
5045 * @opmaps grp1_83
5046 * @opcode /6
5047 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    /* XOR Ev,Ib (0x83 /6): XORs a sign-extended byte immediate into a
       16/32/64-bit destination; unlocked and LOCK-prefixed memory variants. */
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5054
5055
5056/**
5057 * @opmaps grp1_83
5058 * @opcode /7
5059 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    /* CMP Ev,Ib (0x83 /7): compare only writes EFLAGS, so the read-only body is
       used and there is no LOCKED variant (LOCK is invalid for CMP). */
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5065
5066
5067/**
5068 * @opcode 0x83
5069 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Group 1 opcode 0x83 dispatcher: fetches the ModRM byte and forwards to the
       per-operation worker selected by ModRM.reg (/0../7 above). */
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5089
5090
5091/**
5092 * @opcode 0x84
5093 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb: byte AND that only updates EFLAGS, hence the read-only body;
       the NO_LOCK body handles the LOCK-prefix case separately. AF is declared
       undefined for verification purposes. */
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5101
5102
5103/**
5104 * @opcode 0x85
5105 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv: 16/32/64-bit AND that only updates EFLAGS, hence the
       read-only body. AF is declared undefined for verification purposes. */
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5112
5113
5114/**
5115 * @opcode 0x86
5116 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    /* XCHG Eb,Gb: swaps a byte register with another byte register or with a
       byte in memory. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register/register form: plain fetch-both / store-both swap. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The memory byte is mapped R/W and the register value is passed by
           reference; the locked helper is selected unless this VM is set up
           to disregard LOCK semantics (IEM_F_X86_DISREGARD_LOCK). */
        IEM_MC_BEGIN(2, 4, 0, 0);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);
        IEM_MC_LOCAL(uint8_t,  uTmpReg);
        IEM_MC_ARG(uint8_t *,  pu8Mem, 0);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5167
5168
5169/**
5170 * @opcode 0x87
5171 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    /* XCHG Ev,Gv: swaps a 16/32/64-bit register with another register or with
       memory; one case per effective operand size. */
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Same pattern for each width: map the memory operand R/W, then call
           the locked xchg helper unless IEM_F_X86_DISREGARD_LOCK is set. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 4, 0, 0);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
                IEM_MC_LOCAL(uint16_t,  uTmpReg);
                IEM_MC_ARG(uint16_t *,  pu16Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
                IEM_MC_LOCAL(uint32_t,  uTmpReg);
                IEM_MC_ARG(uint32_t *,  pu32Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,   bUnmapInfo);
                IEM_MC_LOCAL(uint64_t,  uTmpReg);
                IEM_MC_ARG(uint64_t *,  pu64Mem, 0);
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5312
5313
5314/**
5315 * @opcode 0x88
5316 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    /* MOV Eb,Gb: stores a byte register into another byte register or into
       memory. */
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5353
5354
5355/**
5356 * @opcode 0x89
5357 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    /* MOV Ev,Gv: stores a 16/32/64-bit register into another register or into
       memory; one case per effective operand size. */
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5451
5452
5453/**
5454 * @opcode 0x8a
5455 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    /* MOV Gb,Eb: loads a byte register from another byte register or from
       memory (mirror of 0x88). */
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5491
5492
5493/**
5494 * @opcode 0x8b
5495 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    /* MOV Gv,Ev: loads a 16/32/64-bit register from another register or from
       memory (mirror of 0x89); one case per effective operand size. */
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5589
5590
5591/**
5592 * opcode 0x63
5593 * @todo Table fixme
5594 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    /* Opcode 0x63 is mode-dependent: ARPL outside 64-bit code; in 64-bit code
       it is MOVSXD with REX.W, otherwise it behaves as a plain MOV Gv,Ev. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
5603
5604
5605/**
5606 * @opcode 0x8c
5607 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    /* MOV Ev,Sw: stores a segment selector into a GPR (zero-extended per the
       effective operand size) or into a 16-bit memory word. */
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5683
5684
5685
5686
5687/**
5688 * @opcode 0x8d
5689 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    /* LEA Gv,M: stores the computed effective address (no memory access) in the
       destination register, truncated to 16/32 bits for the smaller operand
       sizes.  The register form of ModRM is invalid for LEA. */
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5736
5737
5738/**
5739 * @opcode 0x8e
5740 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    /* MOV Sw,Ev: loads a segment register from a GPR or a 16-bit memory word,
       going through iemCImpl_load_SReg for the descriptor/permission work.
       Loading CS this way is invalid; loading SS inhibits interrupts for one
       instruction (IEM_CIMPL_F_INHIBIT_SHADOW). */
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Common register-form body; the caller picks the CIMPL flags. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        /* Common memory-form body; same flag-selection logic as above. */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5843
5844
5845/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* POP Ev (0x8f /0): pops from the stack into a register or memory operand. */
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    /* The 'N << 8' third argument to IEM_MC_CALC_RM_EFF_ADDR feeds in the
       stack-adjustment amount applied before the address calculation, per the
       pre-incremented-RSP rule noted above. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5979
5980
5981/**
5982 * @opcode 0x8f
5983 */
5984FNIEMOP_DEF(iemOp_Grp1A__xop)
5985{
5986 /*
5987 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5988 * three byte VEX prefix, except that the mmmmm field cannot have the values
5989 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5990 */
5991 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5992 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5993 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5994
5995 IEMOP_MNEMONIC(xop, "xop");
5996 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5997 {
5998 /** @todo Test when exctly the XOP conformance checks kick in during
5999 * instruction decoding and fetching (using \#PF). */
6000 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6001 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6002 if ( ( pVCpu->iem.s.fPrefixes
6003 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6004 == 0)
6005 {
6006 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6007 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6008 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6009 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6010 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6011 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6012 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6013 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6014 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6015
6016 /** @todo XOP: Just use new tables and decoders. */
6017 switch (bRm & 0x1f)
6018 {
6019 case 8: /* xop opcode map 8. */
6020 IEMOP_BITCH_ABOUT_STUB();
6021 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6022
6023 case 9: /* xop opcode map 9. */
6024 IEMOP_BITCH_ABOUT_STUB();
6025 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6026
6027 case 10: /* xop opcode map 10. */
6028 IEMOP_BITCH_ABOUT_STUB();
6029 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6030
6031 default:
6032 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6033 IEMOP_RAISE_INVALID_OPCODE_RET();
6034 }
6035 }
6036 else
6037 Log(("XOP: Invalid prefix mix!\n"));
6038 }
6039 else
6040 Log(("XOP: XOP support disabled!\n"));
6041 IEMOP_RAISE_INVALID_OPCODE_RET();
6042}
6043
6044
6045/**
6046 * Common 'xchg reg,rAX' helper.
6047 */
6048FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6049{
6050 iReg |= pVCpu->iem.s.uRexB;
6051 switch (pVCpu->iem.s.enmEffOpSize)
6052 {
6053 case IEMMODE_16BIT:
6054 IEM_MC_BEGIN(0, 2, 0, 0);
6055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6056 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6057 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6058 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6059 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6060 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6061 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6062 IEM_MC_ADVANCE_RIP_AND_FINISH();
6063 IEM_MC_END();
6064 break;
6065
6066 case IEMMODE_32BIT:
6067 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6069 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6070 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6071 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6072 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6073 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6074 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6075 IEM_MC_ADVANCE_RIP_AND_FINISH();
6076 IEM_MC_END();
6077 break;
6078
6079 case IEMMODE_64BIT:
6080 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6082 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6083 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6084 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6085 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6086 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6087 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6088 IEM_MC_ADVANCE_RIP_AND_FINISH();
6089 IEM_MC_END();
6090 break;
6091
6092 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6093 }
6094}
6095
6096
6097/**
6098 * @opcode 0x90
6099 */
6100FNIEMOP_DEF(iemOp_nop)
6101{
6102 /* R8/R8D and RAX/EAX can be exchanged. */
6103 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6104 {
6105 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6106 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6107 }
6108
6109 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6110 {
6111 IEMOP_MNEMONIC(pause, "pause");
6112 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6113 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6114 if (!IEM_IS_IN_GUEST(pVCpu))
6115 { /* probable */ }
6116#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6117 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6118 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
6119#endif
6120#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6121 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6122 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
6123#endif
6124 }
6125 else
6126 IEMOP_MNEMONIC(nop, "nop");
6127 /** @todo testcase: lock nop; lock pause */
6128 IEM_MC_BEGIN(0, 0, 0, 0);
6129 IEMOP_HLP_DONE_DECODING();
6130 IEM_MC_ADVANCE_RIP_AND_FINISH();
6131 IEM_MC_END();
6132}
6133
6134
6135/**
6136 * @opcode 0x91
6137 */
6138FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6139{
6140 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6141 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6142}
6143
6144
6145/**
6146 * @opcode 0x92
6147 */
6148FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6149{
6150 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6151 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6152}
6153
6154
6155/**
6156 * @opcode 0x93
6157 */
6158FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6159{
6160 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6161 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6162}
6163
6164
6165/**
6166 * @opcode 0x94
6167 */
6168FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6169{
6170 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6171 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6172}
6173
6174
6175/**
6176 * @opcode 0x95
6177 */
6178FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6179{
6180 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6181 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6182}
6183
6184
6185/**
6186 * @opcode 0x96
6187 */
6188FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6189{
6190 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6191 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6192}
6193
6194
6195/**
6196 * @opcode 0x97
6197 */
6198FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6199{
6200 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6201 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6202}
6203
6204
6205/**
6206 * @opcode 0x98
6207 */
6208FNIEMOP_DEF(iemOp_cbw)
6209{
6210 switch (pVCpu->iem.s.enmEffOpSize)
6211 {
6212 case IEMMODE_16BIT:
6213 IEMOP_MNEMONIC(cbw, "cbw");
6214 IEM_MC_BEGIN(0, 1, 0, 0);
6215 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6216 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6217 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6218 } IEM_MC_ELSE() {
6219 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6220 } IEM_MC_ENDIF();
6221 IEM_MC_ADVANCE_RIP_AND_FINISH();
6222 IEM_MC_END();
6223 break;
6224
6225 case IEMMODE_32BIT:
6226 IEMOP_MNEMONIC(cwde, "cwde");
6227 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6229 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6230 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6231 } IEM_MC_ELSE() {
6232 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6233 } IEM_MC_ENDIF();
6234 IEM_MC_ADVANCE_RIP_AND_FINISH();
6235 IEM_MC_END();
6236 break;
6237
6238 case IEMMODE_64BIT:
6239 IEMOP_MNEMONIC(cdqe, "cdqe");
6240 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6242 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6243 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6244 } IEM_MC_ELSE() {
6245 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6246 } IEM_MC_ENDIF();
6247 IEM_MC_ADVANCE_RIP_AND_FINISH();
6248 IEM_MC_END();
6249 break;
6250
6251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6252 }
6253}
6254
6255
6256/**
6257 * @opcode 0x99
6258 */
6259FNIEMOP_DEF(iemOp_cwd)
6260{
6261 switch (pVCpu->iem.s.enmEffOpSize)
6262 {
6263 case IEMMODE_16BIT:
6264 IEMOP_MNEMONIC(cwd, "cwd");
6265 IEM_MC_BEGIN(0, 1, 0, 0);
6266 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6267 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6268 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6269 } IEM_MC_ELSE() {
6270 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6271 } IEM_MC_ENDIF();
6272 IEM_MC_ADVANCE_RIP_AND_FINISH();
6273 IEM_MC_END();
6274 break;
6275
6276 case IEMMODE_32BIT:
6277 IEMOP_MNEMONIC(cdq, "cdq");
6278 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6280 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6281 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6282 } IEM_MC_ELSE() {
6283 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6284 } IEM_MC_ENDIF();
6285 IEM_MC_ADVANCE_RIP_AND_FINISH();
6286 IEM_MC_END();
6287 break;
6288
6289 case IEMMODE_64BIT:
6290 IEMOP_MNEMONIC(cqo, "cqo");
6291 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6293 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6294 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6295 } IEM_MC_ELSE() {
6296 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6297 } IEM_MC_ENDIF();
6298 IEM_MC_ADVANCE_RIP_AND_FINISH();
6299 IEM_MC_END();
6300 break;
6301
6302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6303 }
6304}
6305
6306
6307/**
6308 * @opcode 0x9a
6309 */
6310FNIEMOP_DEF(iemOp_call_Ap)
6311{
6312 IEMOP_MNEMONIC(call_Ap, "call Ap");
6313 IEMOP_HLP_NO_64BIT();
6314
6315 /* Decode the far pointer address and pass it on to the far call C implementation. */
6316 uint32_t off32Seg;
6317 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6318 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6319 else
6320 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6321 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6323 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
6324 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
6325 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6326}
6327
6328
/** Opcode 0x9b. (aka fwait)
 * Checks for pending x87 exceptions / device-not-available conditions and
 * otherwise does nothing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6340
6341
6342/**
6343 * @opcode 0x9c
6344 */
6345FNIEMOP_DEF(iemOp_pushf_Fv)
6346{
6347 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6349 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6350 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6351}
6352
6353
6354/**
6355 * @opcode 0x9d
6356 */
6357FNIEMOP_DEF(iemOp_popf_Fv)
6358{
6359 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6361 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6362 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6363 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6364}
6365
6366
6367/**
6368 * @opcode 0x9e
6369 */
6370FNIEMOP_DEF(iemOp_sahf)
6371{
6372 IEMOP_MNEMONIC(sahf, "sahf");
6373 if ( IEM_IS_64BIT_CODE(pVCpu)
6374 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6375 IEMOP_RAISE_INVALID_OPCODE_RET();
6376 IEM_MC_BEGIN(0, 2, 0, 0);
6377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6378 IEM_MC_LOCAL(uint32_t, u32Flags);
6379 IEM_MC_LOCAL(uint32_t, EFlags);
6380 IEM_MC_FETCH_EFLAGS(EFlags);
6381 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6382 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6383 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6384 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6385 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6386 IEM_MC_COMMIT_EFLAGS(EFlags);
6387 IEM_MC_ADVANCE_RIP_AND_FINISH();
6388 IEM_MC_END();
6389}
6390
6391
6392/**
6393 * @opcode 0x9f
6394 */
6395FNIEMOP_DEF(iemOp_lahf)
6396{
6397 IEMOP_MNEMONIC(lahf, "lahf");
6398 if ( IEM_IS_64BIT_CODE(pVCpu)
6399 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6400 IEMOP_RAISE_INVALID_OPCODE_RET();
6401 IEM_MC_BEGIN(0, 1, 0, 0);
6402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6403 IEM_MC_LOCAL(uint8_t, u8Flags);
6404 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6405 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6406 IEM_MC_ADVANCE_RIP_AND_FINISH();
6407 IEM_MC_END();
6408}
6409
6410
6411/**
6412 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6413 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6414 * Will return/throw on failures.
6415 * @param a_GCPtrMemOff The variable to store the offset in.
6416 */
6417#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6418 do \
6419 { \
6420 switch (pVCpu->iem.s.enmEffAddrMode) \
6421 { \
6422 case IEMMODE_16BIT: \
6423 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6424 break; \
6425 case IEMMODE_32BIT: \
6426 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6427 break; \
6428 case IEMMODE_64BIT: \
6429 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6430 break; \
6431 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6432 } \
6433 } while (0)
6434
6435/**
6436 * @opcode 0xa0
6437 */
6438FNIEMOP_DEF(iemOp_mov_AL_Ob)
6439{
6440 /*
6441 * Get the offset.
6442 */
6443 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6444 RTGCPTR GCPtrMemOff;
6445 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6446
6447 /*
6448 * Fetch AL.
6449 */
6450 IEM_MC_BEGIN(0, 1, 0, 0);
6451 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6452 IEM_MC_LOCAL(uint8_t, u8Tmp);
6453 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6454 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6455 IEM_MC_ADVANCE_RIP_AND_FINISH();
6456 IEM_MC_END();
6457}
6458
6459
6460/**
6461 * @opcode 0xa1
6462 */
6463FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6464{
6465 /*
6466 * Get the offset.
6467 */
6468 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6469 RTGCPTR GCPtrMemOff;
6470 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6471
6472 /*
6473 * Fetch rAX.
6474 */
6475 switch (pVCpu->iem.s.enmEffOpSize)
6476 {
6477 case IEMMODE_16BIT:
6478 IEM_MC_BEGIN(0, 1, 0, 0);
6479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6480 IEM_MC_LOCAL(uint16_t, u16Tmp);
6481 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6482 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6483 IEM_MC_ADVANCE_RIP_AND_FINISH();
6484 IEM_MC_END();
6485 break;
6486
6487 case IEMMODE_32BIT:
6488 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6490 IEM_MC_LOCAL(uint32_t, u32Tmp);
6491 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6492 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6493 IEM_MC_ADVANCE_RIP_AND_FINISH();
6494 IEM_MC_END();
6495 break;
6496
6497 case IEMMODE_64BIT:
6498 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6500 IEM_MC_LOCAL(uint64_t, u64Tmp);
6501 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6502 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6503 IEM_MC_ADVANCE_RIP_AND_FINISH();
6504 IEM_MC_END();
6505 break;
6506
6507 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6508 }
6509}
6510
6511
6512/**
6513 * @opcode 0xa2
6514 */
6515FNIEMOP_DEF(iemOp_mov_Ob_AL)
6516{
6517 /*
6518 * Get the offset.
6519 */
6520 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6521 RTGCPTR GCPtrMemOff;
6522 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6523
6524 /*
6525 * Store AL.
6526 */
6527 IEM_MC_BEGIN(0, 1, 0, 0);
6528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6529 IEM_MC_LOCAL(uint8_t, u8Tmp);
6530 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6531 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6532 IEM_MC_ADVANCE_RIP_AND_FINISH();
6533 IEM_MC_END();
6534}
6535
6536
6537/**
6538 * @opcode 0xa3
6539 */
6540FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6541{
6542 /*
6543 * Get the offset.
6544 */
6545 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6546 RTGCPTR GCPtrMemOff;
6547 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
6548
6549 /*
6550 * Store rAX.
6551 */
6552 switch (pVCpu->iem.s.enmEffOpSize)
6553 {
6554 case IEMMODE_16BIT:
6555 IEM_MC_BEGIN(0, 1, 0, 0);
6556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6557 IEM_MC_LOCAL(uint16_t, u16Tmp);
6558 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6559 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6560 IEM_MC_ADVANCE_RIP_AND_FINISH();
6561 IEM_MC_END();
6562 break;
6563
6564 case IEMMODE_32BIT:
6565 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6567 IEM_MC_LOCAL(uint32_t, u32Tmp);
6568 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6569 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6570 IEM_MC_ADVANCE_RIP_AND_FINISH();
6571 IEM_MC_END();
6572 break;
6573
6574 case IEMMODE_64BIT:
6575 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6577 IEM_MC_LOCAL(uint64_t, u64Tmp);
6578 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6579 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6580 IEM_MC_ADVANCE_RIP_AND_FINISH();
6581 IEM_MC_END();
6582 break;
6583
6584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6585 }
6586}
6587
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one non-rep MOVS case: loads ValBits bits from iEffSeg:xSI, stores
 * them to ES:xDI, then advances (or, with EFLAGS.DF set, retreats) both xSI
 * and xDI by ValBits/8 using AddrBits-wide register arithmetic. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6607
6608/**
6609 * @opcode 0xa4
6610 */
6611FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6612{
6613 /*
6614 * Use the C implementation if a repeat prefix is encountered.
6615 */
6616 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6617 {
6618 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6620 switch (pVCpu->iem.s.enmEffAddrMode)
6621 {
6622 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6623 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6624 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6626 }
6627 }
6628
6629 /*
6630 * Sharing case implementation with movs[wdq] below.
6631 */
6632 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6633 switch (pVCpu->iem.s.enmEffAddrMode)
6634 {
6635 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6636 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6637 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6639 }
6640}
6641
6642
6643/**
6644 * @opcode 0xa5
6645 */
6646FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6647{
6648
6649 /*
6650 * Use the C implementation if a repeat prefix is encountered.
6651 */
6652 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6653 {
6654 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6656 switch (pVCpu->iem.s.enmEffOpSize)
6657 {
6658 case IEMMODE_16BIT:
6659 switch (pVCpu->iem.s.enmEffAddrMode)
6660 {
6661 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6662 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6663 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6665 }
6666 break;
6667 case IEMMODE_32BIT:
6668 switch (pVCpu->iem.s.enmEffAddrMode)
6669 {
6670 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6671 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6672 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6673 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6674 }
6675 case IEMMODE_64BIT:
6676 switch (pVCpu->iem.s.enmEffAddrMode)
6677 {
6678 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6679 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6680 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6681 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6682 }
6683 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6684 }
6685 }
6686
6687 /*
6688 * Annoying double switch here.
6689 * Using ugly macro for implementing the cases, sharing it with movsb.
6690 */
6691 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6692 switch (pVCpu->iem.s.enmEffOpSize)
6693 {
6694 case IEMMODE_16BIT:
6695 switch (pVCpu->iem.s.enmEffAddrMode)
6696 {
6697 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6698 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6699 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6700 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6701 }
6702 break;
6703
6704 case IEMMODE_32BIT:
6705 switch (pVCpu->iem.s.enmEffAddrMode)
6706 {
6707 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6708 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6709 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6711 }
6712 break;
6713
6714 case IEMMODE_64BIT:
6715 switch (pVCpu->iem.s.enmEffAddrMode)
6716 {
6717 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6718 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6719 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6720 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6721 }
6722 break;
6723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6724 }
6725}
6726
6727#undef IEM_MOVS_CASE
6728
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits one non-rep CMPS case: loads ValBits bits from iEffSeg:xSI and from
 * ES:xDI, runs iemAImpl_cmp_uNN on them (updating EFLAGS), then advances
 * (or, with EFLAGS.DF set, retreats) both xSI and xDI by ValBits/8 using
 * AddrBits-wide register arithmetic. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6756
6757/**
6758 * @opcode 0xa6
6759 */
6760FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6761{
6762
6763 /*
6764 * Use the C implementation if a repeat prefix is encountered.
6765 */
6766 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6767 {
6768 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6769 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6770 switch (pVCpu->iem.s.enmEffAddrMode)
6771 {
6772 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6773 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6774 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6776 }
6777 }
6778 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6779 {
6780 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6782 switch (pVCpu->iem.s.enmEffAddrMode)
6783 {
6784 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6785 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6786 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6787 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6788 }
6789 }
6790
6791 /*
6792 * Sharing case implementation with cmps[wdq] below.
6793 */
6794 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6795 switch (pVCpu->iem.s.enmEffAddrMode)
6796 {
6797 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6798 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6799 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
6800 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6801 }
6802}
6803
6804
6805/**
6806 * @opcode 0xa7
6807 */
6808FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6809{
6810 /*
6811 * Use the C implementation if a repeat prefix is encountered.
6812 */
6813 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6814 {
6815 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6816 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6817 switch (pVCpu->iem.s.enmEffOpSize)
6818 {
6819 case IEMMODE_16BIT:
6820 switch (pVCpu->iem.s.enmEffAddrMode)
6821 {
6822 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6823 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6824 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6826 }
6827 break;
6828 case IEMMODE_32BIT:
6829 switch (pVCpu->iem.s.enmEffAddrMode)
6830 {
6831 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6832 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6833 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6835 }
6836 case IEMMODE_64BIT:
6837 switch (pVCpu->iem.s.enmEffAddrMode)
6838 {
6839 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6840 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6841 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6842 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6843 }
6844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6845 }
6846 }
6847
6848 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6849 {
6850 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6852 switch (pVCpu->iem.s.enmEffOpSize)
6853 {
6854 case IEMMODE_16BIT:
6855 switch (pVCpu->iem.s.enmEffAddrMode)
6856 {
6857 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6858 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6859 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6861 }
6862 break;
6863 case IEMMODE_32BIT:
6864 switch (pVCpu->iem.s.enmEffAddrMode)
6865 {
6866 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6867 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6868 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6870 }
6871 case IEMMODE_64BIT:
6872 switch (pVCpu->iem.s.enmEffAddrMode)
6873 {
6874 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6875 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6876 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6877 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6878 }
6879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6880 }
6881 }
6882
6883 /*
6884 * Annoying double switch here.
6885 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6886 */
6887 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6888 switch (pVCpu->iem.s.enmEffOpSize)
6889 {
6890 case IEMMODE_16BIT:
6891 switch (pVCpu->iem.s.enmEffAddrMode)
6892 {
6893 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6894 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6895 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
6896 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6897 }
6898 break;
6899
6900 case IEMMODE_32BIT:
6901 switch (pVCpu->iem.s.enmEffAddrMode)
6902 {
6903 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6904 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6905 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
6906 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6907 }
6908 break;
6909
6910 case IEMMODE_64BIT:
6911 switch (pVCpu->iem.s.enmEffAddrMode)
6912 {
6913 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6914 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
6915 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
6916 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6917 }
6918 break;
6919 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6920 }
6921}
6922
6923#undef IEM_CMPS_CASE
6924
6925/**
6926 * @opcode 0xa8
6927 */
6928FNIEMOP_DEF(iemOp_test_AL_Ib)
6929{
6930 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6932 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6933}
6934
6935
6936/**
6937 * @opcode 0xa9
6938 */
6939FNIEMOP_DEF(iemOp_test_eAX_Iz)
6940{
6941 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6942 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6943 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6944}
6945
6946
6947 /** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
      * Emits the microcode for one non-repeated STOS variant: fetch the low
      * ValBits of xAX, store them at ES:[xDI], then add or subtract ValBits/8
      * to/from xDI depending on EFLAGS.DF.  AddrBits selects how xDI is read
      * (zero-extended to 64-bit) and updated; a_fMcFlags carries the
      * CPU-level / mode restrictions for the MC block. */
6948 #define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
6949         IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
6950         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
6951         IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
6952         IEM_MC_LOCAL(RTGCPTR, uAddr); \
6953         IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
6954         IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
6955         IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
6956         IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
6957             IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6958         } IEM_MC_ELSE() { \
6959             IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6960         } IEM_MC_ENDIF(); \
6961         IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6962         IEM_MC_END() \
6963 
6963
6964/**
6965 * @opcode 0xaa
6966 */
6967FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6968{
6969 /*
6970 * Use the C implementation if a repeat prefix is encountered.
6971 */
6972 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6973 {
6974 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6976 switch (pVCpu->iem.s.enmEffAddrMode)
6977 {
6978 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6979 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6980 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6982 }
6983 }
6984
6985 /*
6986 * Sharing case implementation with stos[wdq] below.
6987 */
6988 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6989 switch (pVCpu->iem.s.enmEffAddrMode)
6990 {
6991 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6992 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6993 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
6994 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6995 }
6996}
6997
6998
6999/**
7000 * @opcode 0xab
7001 */
7002FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7003{
7004 /*
7005 * Use the C implementation if a repeat prefix is encountered.
7006 */
7007 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7008 {
7009 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7011 switch (pVCpu->iem.s.enmEffOpSize)
7012 {
7013 case IEMMODE_16BIT:
7014 switch (pVCpu->iem.s.enmEffAddrMode)
7015 {
7016 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
7017 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
7018 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
7019 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7020 }
7021 break;
7022 case IEMMODE_32BIT:
7023 switch (pVCpu->iem.s.enmEffAddrMode)
7024 {
7025 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
7026 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
7027 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
7028 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7029 }
7030 case IEMMODE_64BIT:
7031 switch (pVCpu->iem.s.enmEffAddrMode)
7032 {
7033 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7034 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
7035 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
7036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7037 }
7038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7039 }
7040 }
7041
7042 /*
7043 * Annoying double switch here.
7044 * Using ugly macro for implementing the cases, sharing it with stosb.
7045 */
7046 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7047 switch (pVCpu->iem.s.enmEffOpSize)
7048 {
7049 case IEMMODE_16BIT:
7050 switch (pVCpu->iem.s.enmEffAddrMode)
7051 {
7052 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7053 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7054 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7055 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7056 }
7057 break;
7058
7059 case IEMMODE_32BIT:
7060 switch (pVCpu->iem.s.enmEffAddrMode)
7061 {
7062 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7063 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7064 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7065 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7066 }
7067 break;
7068
7069 case IEMMODE_64BIT:
7070 switch (pVCpu->iem.s.enmEffAddrMode)
7071 {
7072 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7073 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7074 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7075 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7076 }
7077 break;
7078 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7079 }
7080}
7081
7082#undef IEM_STOS_CASE
7083
7084 /** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
      * Emits the microcode for one non-repeated LODS variant: load ValBits
      * from DS (or segment-override) :[xSI] into the low ValBits of xAX, then
      * add or subtract ValBits/8 to/from xSI depending on EFLAGS.DF.
      * AddrBits selects how xSI is read (zero-extended to 64-bit) and
      * updated; a_fMcFlags carries the CPU-level / mode restrictions. */
7085 #define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
7086         IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
7087         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
7088         IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
7089         IEM_MC_LOCAL(RTGCPTR, uAddr); \
7090         IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
7091         IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
7092         IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
7093         IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
7094             IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7095         } IEM_MC_ELSE() { \
7096             IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
7097         } IEM_MC_ENDIF(); \
7098         IEM_MC_ADVANCE_RIP_AND_FINISH(); \
7099         IEM_MC_END() \
7100 
7100
7101/**
7102 * @opcode 0xac
7103 */
7104FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7105{
7106 /*
7107 * Use the C implementation if a repeat prefix is encountered.
7108 */
7109 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7110 {
7111 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7113 switch (pVCpu->iem.s.enmEffAddrMode)
7114 {
7115 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7116 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7117 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7119 }
7120 }
7121
7122 /*
7123 * Sharing case implementation with stos[wdq] below.
7124 */
7125 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7126 switch (pVCpu->iem.s.enmEffAddrMode)
7127 {
7128 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7129 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7130 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7131 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7132 }
7133}
7134
7135
7136/**
7137 * @opcode 0xad
7138 */
7139FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7140{
7141 /*
7142 * Use the C implementation if a repeat prefix is encountered.
7143 */
7144 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7145 {
7146 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7147 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7148 switch (pVCpu->iem.s.enmEffOpSize)
7149 {
7150 case IEMMODE_16BIT:
7151 switch (pVCpu->iem.s.enmEffAddrMode)
7152 {
7153 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7154 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7155 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7157 }
7158 break;
7159 case IEMMODE_32BIT:
7160 switch (pVCpu->iem.s.enmEffAddrMode)
7161 {
7162 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7163 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7164 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7165 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7166 }
7167 case IEMMODE_64BIT:
7168 switch (pVCpu->iem.s.enmEffAddrMode)
7169 {
7170 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7171 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7172 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7174 }
7175 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7176 }
7177 }
7178
7179 /*
7180 * Annoying double switch here.
7181 * Using ugly macro for implementing the cases, sharing it with lodsb.
7182 */
7183 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7184 switch (pVCpu->iem.s.enmEffOpSize)
7185 {
7186 case IEMMODE_16BIT:
7187 switch (pVCpu->iem.s.enmEffAddrMode)
7188 {
7189 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7190 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7191 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7193 }
7194 break;
7195
7196 case IEMMODE_32BIT:
7197 switch (pVCpu->iem.s.enmEffAddrMode)
7198 {
7199 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7200 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7201 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7203 }
7204 break;
7205
7206 case IEMMODE_64BIT:
7207 switch (pVCpu->iem.s.enmEffAddrMode)
7208 {
7209 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7210 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7211 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7212 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7213 }
7214 break;
7215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7216 }
7217}
7218
7219#undef IEM_LODS_CASE
7220
7221 /** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
      * Emits the microcode for one non-repeated SCAS variant: fetch ValBits
      * from ES:[xDI], compare against the low ValBits of xAX via the cmp
      * assembly worker (updates EFLAGS only, xAX is untouched), then add or
      * subtract ValBits/8 to/from xDI depending on EFLAGS.DF. */
7222 #define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
7223         IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
7224         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
7225         IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
7226         IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
7227         IEM_MC_ARG(uint32_t *, pEFlags, 2); \
7228         IEM_MC_LOCAL(RTGCPTR, uAddr); \
7229         \
7230         IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
7231         IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
7232         IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
7233         IEM_MC_REF_EFLAGS(pEFlags); \
7234         IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
7235         \
7236         IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
7237             IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7238         } IEM_MC_ELSE() { \
7239             IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
7240         } IEM_MC_ENDIF(); \
7241         IEM_MC_ADVANCE_RIP_AND_FINISH(); \
7242         IEM_MC_END();
7243 
7243
7244/**
7245 * @opcode 0xae
7246 */
7247FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7248{
7249 /*
7250 * Use the C implementation if a repeat prefix is encountered.
7251 */
7252 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7253 {
7254 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7256 switch (pVCpu->iem.s.enmEffAddrMode)
7257 {
7258 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
7259 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
7260 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
7261 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7262 }
7263 }
7264 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7265 {
7266 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7268 switch (pVCpu->iem.s.enmEffAddrMode)
7269 {
7270 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
7271 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
7272 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
7273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7274 }
7275 }
7276
7277 /*
7278 * Sharing case implementation with stos[wdq] below.
7279 */
7280 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7281 switch (pVCpu->iem.s.enmEffAddrMode)
7282 {
7283 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7284 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7285 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7286 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7287 }
7288}
7289
7290
7291/**
7292 * @opcode 0xaf
7293 */
7294FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7295{
7296 /*
7297 * Use the C implementation if a repeat prefix is encountered.
7298 */
7299 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7300 {
7301 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7303 switch (pVCpu->iem.s.enmEffOpSize)
7304 {
7305 case IEMMODE_16BIT:
7306 switch (pVCpu->iem.s.enmEffAddrMode)
7307 {
7308 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
7309 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
7310 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
7311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7312 }
7313 break;
7314 case IEMMODE_32BIT:
7315 switch (pVCpu->iem.s.enmEffAddrMode)
7316 {
7317 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
7318 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
7319 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
7320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7321 }
7322 case IEMMODE_64BIT:
7323 switch (pVCpu->iem.s.enmEffAddrMode)
7324 {
7325 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7326 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
7327 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
7328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7329 }
7330 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7331 }
7332 }
7333 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7334 {
7335 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7337 switch (pVCpu->iem.s.enmEffOpSize)
7338 {
7339 case IEMMODE_16BIT:
7340 switch (pVCpu->iem.s.enmEffAddrMode)
7341 {
7342 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
7343 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
7344 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
7345 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7346 }
7347 break;
7348 case IEMMODE_32BIT:
7349 switch (pVCpu->iem.s.enmEffAddrMode)
7350 {
7351 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
7352 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
7353 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
7354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7355 }
7356 case IEMMODE_64BIT:
7357 switch (pVCpu->iem.s.enmEffAddrMode)
7358 {
7359 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7360 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
7361 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
7362 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7363 }
7364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7365 }
7366 }
7367
7368 /*
7369 * Annoying double switch here.
7370 * Using ugly macro for implementing the cases, sharing it with scasb.
7371 */
7372 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7373 switch (pVCpu->iem.s.enmEffOpSize)
7374 {
7375 case IEMMODE_16BIT:
7376 switch (pVCpu->iem.s.enmEffAddrMode)
7377 {
7378 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7379 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7380 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7382 }
7383 break;
7384
7385 case IEMMODE_32BIT:
7386 switch (pVCpu->iem.s.enmEffAddrMode)
7387 {
7388 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7389 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7390 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7392 }
7393 break;
7394
7395 case IEMMODE_64BIT:
7396 switch (pVCpu->iem.s.enmEffAddrMode)
7397 {
7398 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7399 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7400 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7402 }
7403 break;
7404 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7405 }
7406}
7407
7408#undef IEM_SCAS_CASE
7409
7410/**
7411 * Common 'mov r8, imm8' helper.
7412 */
7413FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7414{
7415 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7416 IEM_MC_BEGIN(0, 1, 0, 0);
7417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7418 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
7419 IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
7420 IEM_MC_ADVANCE_RIP_AND_FINISH();
7421 IEM_MC_END();
7422}
7423
7424
7425/**
7426 * @opcode 0xb0
7427 */
7428FNIEMOP_DEF(iemOp_mov_AL_Ib)
7429{
7430 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7431 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7432}
7433
7434
7435/**
7436 * @opcode 0xb1
7437 */
7438FNIEMOP_DEF(iemOp_CL_Ib)
7439{
7440 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7441 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7442}
7443
7444
7445/**
7446 * @opcode 0xb2
7447 */
7448FNIEMOP_DEF(iemOp_DL_Ib)
7449{
7450 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7451 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7452}
7453
7454
7455/**
7456 * @opcode 0xb3
7457 */
7458FNIEMOP_DEF(iemOp_BL_Ib)
7459{
7460 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7461 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7462}
7463
7464
7465/**
7466 * @opcode 0xb4
7467 */
7468FNIEMOP_DEF(iemOp_mov_AH_Ib)
7469{
7470 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7471 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7472}
7473
7474
7475/**
7476 * @opcode 0xb5
7477 */
7478FNIEMOP_DEF(iemOp_CH_Ib)
7479{
7480 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7481 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7482}
7483
7484
7485/**
7486 * @opcode 0xb6
7487 */
7488FNIEMOP_DEF(iemOp_DH_Ib)
7489{
7490 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7491 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7492}
7493
7494
7495/**
7496 * @opcode 0xb7
7497 */
7498FNIEMOP_DEF(iemOp_BH_Ib)
7499{
7500 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7501 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7502}
7503
7504
7505/**
7506 * Common 'mov regX,immX' helper.
7507 */
7508FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7509{
7510 switch (pVCpu->iem.s.enmEffOpSize)
7511 {
7512 case IEMMODE_16BIT:
7513 IEM_MC_BEGIN(0, 1, 0, 0);
7514 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7515 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7516 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
7517 IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
7518 IEM_MC_ADVANCE_RIP_AND_FINISH();
7519 IEM_MC_END();
7520 break;
7521
7522 case IEMMODE_32BIT:
7523 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
7524 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7526 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
7527 IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
7528 IEM_MC_ADVANCE_RIP_AND_FINISH();
7529 IEM_MC_END();
7530 break;
7531
7532 case IEMMODE_64BIT:
7533 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7534 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
7535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7536 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
7537 IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
7538 IEM_MC_ADVANCE_RIP_AND_FINISH();
7539 IEM_MC_END();
7540 break;
7541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7542 }
7543}
7544
7545
7546/**
7547 * @opcode 0xb8
7548 */
7549FNIEMOP_DEF(iemOp_eAX_Iv)
7550{
7551 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
7552 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7553}
7554
7555
7556/**
7557 * @opcode 0xb9
7558 */
7559FNIEMOP_DEF(iemOp_eCX_Iv)
7560{
7561 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
7562 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7563}
7564
7565
7566/**
7567 * @opcode 0xba
7568 */
7569FNIEMOP_DEF(iemOp_eDX_Iv)
7570{
7571 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
7572 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7573}
7574
7575
7576/**
7577 * @opcode 0xbb
7578 */
7579FNIEMOP_DEF(iemOp_eBX_Iv)
7580{
7581 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
7582 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7583}
7584
7585
7586/**
7587 * @opcode 0xbc
7588 */
7589FNIEMOP_DEF(iemOp_eSP_Iv)
7590{
7591 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
7592 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7593}
7594
7595
7596/**
7597 * @opcode 0xbd
7598 */
7599FNIEMOP_DEF(iemOp_eBP_Iv)
7600{
7601 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
7602 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7603}
7604
7605
7606/**
7607 * @opcode 0xbe
7608 */
7609FNIEMOP_DEF(iemOp_eSI_Iv)
7610{
7611 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
7612 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7613}
7614
7615
7616/**
7617 * @opcode 0xbf
7618 */
7619FNIEMOP_DEF(iemOp_eDI_Iv)
7620{
7621 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
7622 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7623}
7624
7625
7626/**
7627 * @opcode 0xc0
7628 */
7629FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
7630{
7631 IEMOP_HLP_MIN_186();
7632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7633 PCIEMOPSHIFTSIZES pImpl;
7634 switch (IEM_GET_MODRM_REG_8(bRm))
7635 {
7636 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
7637 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
7638 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
7639 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
7640 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
7641 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
7642 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
7643 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7644 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7645 }
7646 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7647
7648 if (IEM_IS_MODRM_REG_MODE(bRm))
7649 {
7650 /* register */
7651 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7652 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
7653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7654 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7655 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7656 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7657 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7658 IEM_MC_REF_EFLAGS(pEFlags);
7659 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7660 IEM_MC_ADVANCE_RIP_AND_FINISH();
7661 IEM_MC_END();
7662 }
7663 else
7664 {
7665 /* memory */
7666 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
7667 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7668 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7669 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7670 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7671 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7672
7673 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7674 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7675 IEM_MC_ASSIGN(cShiftArg, cShift);
7676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7677 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7678 IEM_MC_FETCH_EFLAGS(EFlags);
7679 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7680
7681 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
7682 IEM_MC_COMMIT_EFLAGS(EFlags);
7683 IEM_MC_ADVANCE_RIP_AND_FINISH();
7684 IEM_MC_END();
7685 }
7686}
7687
7688
7689/**
7690 * @opcode 0xc1
7691 */
7692FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
7693{
7694 IEMOP_HLP_MIN_186();
7695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7696 PCIEMOPSHIFTSIZES pImpl;
7697 switch (IEM_GET_MODRM_REG_8(bRm))
7698 {
7699 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
7700 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
7701 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
7702 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
7703 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
7704 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
7705 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
7706 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7707 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7708 }
7709 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7710
7711 if (IEM_IS_MODRM_REG_MODE(bRm))
7712 {
7713 /* register */
7714 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7715 switch (pVCpu->iem.s.enmEffOpSize)
7716 {
7717 case IEMMODE_16BIT:
7718 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
7719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7720 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7721 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7722 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7723 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7724 IEM_MC_REF_EFLAGS(pEFlags);
7725 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7726 IEM_MC_ADVANCE_RIP_AND_FINISH();
7727 IEM_MC_END();
7728 break;
7729
7730 case IEMMODE_32BIT:
7731 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
7732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7733 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7734 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7735 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7736 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7737 IEM_MC_REF_EFLAGS(pEFlags);
7738 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7739 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7740 IEM_MC_ADVANCE_RIP_AND_FINISH();
7741 IEM_MC_END();
7742 break;
7743
7744 case IEMMODE_64BIT:
7745 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
7746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7747 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7748 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7749 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7750 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7751 IEM_MC_REF_EFLAGS(pEFlags);
7752 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7753 IEM_MC_ADVANCE_RIP_AND_FINISH();
7754 IEM_MC_END();
7755 break;
7756
7757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7758 }
7759 }
7760 else
7761 {
7762 /* memory */
7763 switch (pVCpu->iem.s.enmEffOpSize)
7764 {
7765 case IEMMODE_16BIT:
7766 IEM_MC_BEGIN(3, 3, 0, 0);
7767 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7768 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7769 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7771 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7772
7773 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7774 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7775 IEM_MC_ASSIGN(cShiftArg, cShift);
7776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7777 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7778 IEM_MC_FETCH_EFLAGS(EFlags);
7779 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7780
7781 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
7782 IEM_MC_COMMIT_EFLAGS(EFlags);
7783 IEM_MC_ADVANCE_RIP_AND_FINISH();
7784 IEM_MC_END();
7785 break;
7786
7787 case IEMMODE_32BIT:
7788 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
7789 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7790 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7791 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7792 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7793 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7794
7795 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7796 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7797 IEM_MC_ASSIGN(cShiftArg, cShift);
7798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7799 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7800 IEM_MC_FETCH_EFLAGS(EFlags);
7801 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7802
7803 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
7804 IEM_MC_COMMIT_EFLAGS(EFlags);
7805 IEM_MC_ADVANCE_RIP_AND_FINISH();
7806 IEM_MC_END();
7807 break;
7808
7809 case IEMMODE_64BIT:
7810 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
7811 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7812 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7813 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7815 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
7816
7817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7818 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7819 IEM_MC_ASSIGN(cShiftArg, cShift);
7820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7821 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7822 IEM_MC_FETCH_EFLAGS(EFlags);
7823 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7824
7825 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
7826 IEM_MC_COMMIT_EFLAGS(EFlags);
7827 IEM_MC_ADVANCE_RIP_AND_FINISH();
7828 IEM_MC_END();
7829 break;
7830
7831 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7832 }
7833 }
7834}
7835
7836
/**
 * @opcode 0xc2
 *
 * Near return popping an immediate number of bytes off the stack (retn Iw).
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defer to a C implementation selected by effective operand size; the
       return address is popped off the stack, so this is an indirect branch. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7857
7858
/**
 * @opcode 0xc3
 *
 * Plain near return (retn), i.e. retn Iw with zero bytes to pop.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Indirect branch - new RIP comes off the stack. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7878
7879
/**
 * @opcode 0xc4
 *
 * Dual-purpose opcode: LES Gv,Mp or the 3-byte VEX prefix, distinguished by
 * the ModR/M MOD bits and CPU mode (see comment in the body).
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit code. */
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R, X, B and vvvv fields are stored inverted in the prefix bytes. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low five bits of the first payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* MOD != 3 outside 64-bit code: plain LES. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7949
7950
/**
 * @opcode 0xc5
 *
 * Dual-purpose opcode: LDS Gv,Mp or the 2-byte VEX prefix, distinguished by
 * the ModR/M MOD bits and CPU mode (see comment in the body).
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* R and vvvv are stored inverted in the payload byte; the 2-byte
               form implies the 0x0f opcode map (map 1). */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* MOD != 3 outside 64-bit code: plain LDS. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
7994
7995
/**
 * @opcode 0xc6
 *
 * Group 11 byte form: only /0 (mov Eb,Ib) is defined; all other /reg
 * encodings raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* NOTE(review): the 3rd argument presumably is the number of opcode
           bytes (the Ib) still to come after the ModR/M operand - matches the
           2/4 byte values used for the Iz forms below; confirm against the
           IEM_MC_CALC_RM_EFF_ADDR definition. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8029
8030
/**
 * @opcode 0xc7
 *
 * Group 11 word/dword/qword form: only /0 (mov Ev,Iz) is defined; all other
 * /reg encodings raise \#UD.  The 64-bit form sign-extends a 32-bit
 * immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                /* Iz is at most 32 bits; sign-extend it to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8118
8119
8120
8121
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - create a stack frame of Iw bytes with nesting level Ib.
 * Deferred entirely to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8135
8136
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the current stack frame.  Deferred to iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8148
8149
/**
 * @opcode 0xca
 *
 * Far return popping Iw bytes.  A far branch that may also change CPU mode,
 * hence the extra CIMPL flags.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8161
8162
/**
 * @opcode 0xcb
 *
 * Plain far return - same as retf Iw with zero bytes to pop.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8173
8174
/**
 * @opcode 0xcc
 *
 * INT3 - breakpoint trap, raised as \#BP via the common iemCImpl_int worker.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
8186
8187
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with the vector given by the immediate byte.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, u8Int, IEMINT_INTN);
}
8200
8201
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF if the overflow flag is set (conditional branch flag
 * added for that reason).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
}
8213
8214
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return.  Checks for pending IRQs before executing
 * (IEM_CIMPL_F_CHECK_IRQ_BEFORE) since it typically re-enables interrupts.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
8226
8227
/**
 * @opcode 0xd0
 *
 * Group 2 byte shifts/rotates by a constant 1: rol/ror/rcl/rcr/shl/shr/sar
 * Eb,1 selected by the ModR/M reg field; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Pick the worker table for the encoded operation. */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Map the destination read/write, shift in place, then commit. */
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8285
8286
8287
/**
 * @opcode 0xd1
 *
 * Group 2 word/dword/qword shifts/rotates by a constant 1: rol/ror/rcl/rcr/
 * shl/shr/sar Ev,1 selected by the ModR/M reg field; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Pick the worker table for the encoded operation. */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8426
8427
/**
 * @opcode 0xd2
 *
 * Group 2 byte shifts/rotates by CL: rol/ror/rcl/rcr/shl/shr/sar Eb,CL
 * selected by the ModR/M reg field; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Pick the worker table for the encoded operation. */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* The shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8487
8488
/**
 * @opcode 0xd3
 *
 * Group 2 word/dword/qword shifts/rotates by CL: rol/ror/rcl/rcr/shl/shr/sar
 * Ev,CL selected by the ModR/M reg field; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Pick the worker table for the encoded operation. */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,  pu16Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                /* The shift count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,  pu32Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,  pu64Dst,   0);
                IEM_MC_ARG(uint8_t,     cShiftArg, 1);
                IEM_MC_ARG(uint32_t *,  pEFlags,   2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8633
/**
 * @opcode 0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply.  A zero immediate divisor raises
 * \#DE at decode time.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
}
8647
8648
/**
 * @opcode 0xd5
 *
 * AAD Ib - ASCII adjust AX before division.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
}
8660
8661
/**
 * @opcode 0xd6
 *
 * SALC (undocumented) - set AL to 0xff if CF is set, else to 0x00.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8680
8681
/**
 * @opcode 0xd7
 *
 * XLAT - AL = [rBX + AL], table lookup via the effective segment.  One body
 * per effective address size since the rBX + AL sum wraps at that width.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Zero-extend AL, add BX, fetch the byte and store it back in AL. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8732
8733
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM / pending \#MF first; on an empty ST0 or ST(i) it records a
 * stack underflow against ST0 instead of calling the helper.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only call the assembly helper when both registers hold values. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8763
8764
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * The helper writes a new FSW value (condition codes) only; no FPU register
 * is modified.  Underflow is recorded with UINT8_MAX (no destination reg).
 *
 * @param   bRm         Mod R/M byte; the R/M field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8794
8795
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN except the FSW update / underflow
 * recording also pops the register stack (the *_THEN_POP variants).
 *
 * @param   bRm         Mod R/M byte; the R/M field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8825
8826
/** Opcode 0xd8 11/0.
 * FADD ST(0),ST(i) - result stored in ST0 via the common worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8833
8834
/** Opcode 0xd8 11/1.
 * FMUL ST(0),ST(i) - result stored in ST0 via the common worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8841
8842
/** Opcode 0xd8 11/2.
 * FCOM ST(0),ST(i) - flags only, no store, no pop. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8849
8850
/** Opcode 0xd8 11/3.
 * FCOMP ST(0),ST(i) - same comparison helper as FCOM, but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8857
8858
/** Opcode 0xd8 11/4.
 * FSUB ST(0),ST(i) - result stored in ST0 via the common worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8865
8866
/** Opcode 0xd8 11/5.
 * FSUBR ST(0),ST(i) - reversed subtraction, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8873
8874
/** Opcode 0xd8 11/6.
 * FDIV ST(0),ST(i) - result stored in ST0 via the common worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8881
8882
/** Opcode 0xd8 11/7.
 * FDIVR ST(0),ST(i) - reversed division, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8889
8890
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real operand is fetched into a local and passed by reference;
 * the effective address is calculated before decoding completes (required
 * ordering for the MC block).
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Memory operand is already fetched; only ST0 needs to be non-empty. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8926
8927
/** Opcode 0xd8 !11/0.
 * FADD ST(0),m32real - via the common m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8934
8935
/** Opcode 0xd8 !11/1.
 * FMUL ST(0),m32real - via the common m32r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8942
8943
/** Opcode 0xd8 !11/2.
 * FCOM ST(0),m32real - compares and updates FSW condition codes only;
 * underflow reporting includes the memory operand location. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8975
8976
/** Opcode 0xd8 !11/3.
 * FCOMP ST(0),m32real - identical to iemOp_fcom_m32r except the FSW update
 * and underflow paths use the *_THEN_POP variants to pop the stack. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9008
9009
/** Opcode 0xd8 !11/4.
 * FSUB ST(0),m32real - via the common m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
9016
9017
/** Opcode 0xd8 !11/5.
 * FSUBR ST(0),m32real - reversed subtraction via the common m32r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
9024
9025
/** Opcode 0xd8 !11/6.
 * FDIV ST(0),m32real - via the common m32r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
9032
9033
/** Opcode 0xd8 !11/7.
 * FDIVR ST(0),m32real - reversed division via the common m32r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9040
9041
/**
 * @opcode 0xd8
 *
 * First x87 escape byte: dispatches on the ModR/M reg field.  Register mode
 * (mod == 3) selects the ST(0),ST(i) forms; memory mode selects the m32real
 * forms.  The FPU opcode word (FOP) is recorded from the low 3 opcode bits
 * and the ModR/M byte before dispatching.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9081
9082
/** Opcode 0xd9 /0 mem32real
 *
 * FLD m32real: converts the 32-bit real to 80-bit and pushes it.  The push
 * requires the register that will become the new top (ST7 relative to the
 * current top) to be empty; otherwise a push overflow is recorded.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9113
9114
/** Opcode 0xd9 !11/2 mem32real
 *
 * FST m32real: converts ST0 to 32-bit real and stores it.  The destination
 * is mapped for writing before the FPU state check; on an empty ST0 with IM
 * masked, a negative QNaN is stored instead (matching real hardware
 * underflow behaviour), then the underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit only if FSW allows it (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9148
9149
/** Opcode 0xd9 !11/3
 *
 * FSTP m32real: identical to iemOp_fst_m32r except the FSW update and
 * underflow paths use the *_THEN_POP variants to pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9183
9184
/** Opcode 0xd9 !11/4
 *
 * FLDENV m14/28byte: loads the FPU environment; the heavy lifting is done by
 * the C implementation (iemCImpl_fldenv), which gets the effective operand
 * size to pick the 14- vs 28-byte layout. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9201
9202
/** Opcode 0xd9 !11/5
 *
 * FLDCW m2byte: fetches the new control word and hands it to
 * iemCImpl_fldcw.  Note: the argument is (misleadingly) named u16Fsw but it
 * carries the new FCW value, not the status word. */
FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_ARG(uint16_t, u16Fsw, 0);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
    IEM_MC_END();
}
9218
9219
/** Opcode 0xd9 !11/6
 *
 * FNSTENV m14/28byte: stores the FPU environment via iemCImpl_fnstenv.
 * NOTE(review): the stats mnemonic reads "fstenv" although this is the
 * no-wait FNSTENV encoding - presumably intentional naming; confirm. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9236
9237
/** Opcode 0xd9 !11/7
 *
 * FNSTCW m2byte: stores the current control word; simple enough to be done
 * inline without a C implementation call. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9254
9255
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 *
 * FNOP: does nothing except update the FPU opcode/IP bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9271
9272
/** Opcode 0xd9 11/0 stN
 *
 * FLD ST(i): pushes a copy of ST(i) onto the stack.  An empty source
 * register produces a push underflow. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9297
9298
/** Opcode 0xd9 11/3 stN
 *
 * FXCH ST(i): exchanges ST0 and ST(i).  The non-empty path swaps inline
 * (ST(i) gets the old ST0, ST0 gets the old ST(i) with C1 set via the
 * result FSW); the underflow path is delegated to a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9327
9328
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *
 * FSTP ST(i): copies ST0 into ST(i) and pops.  The iDstReg == 0 case
 * (effectively 'ffreep st0') is handled separately since no copy is needed,
 * only the pop and FSW bookkeeping. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9375
9376
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Empty ST0 records a stack underflow against ST0.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9404
9405
/** Opcode 0xd9 0xe0.
 * FCHS: negates ST0 via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9412
9413
/** Opcode 0xd9 0xe1.
 * FABS: takes the absolute value of ST0 via the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9420
9421
/** Opcode 0xd9 0xe4.
 * FTST: compares ST0 against +0.0, updating only the FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9445
9446
/** Opcode 0xd9 0xe5.
 * FXAM: classifies ST0 into the FSW condition codes.  Unlike the other
 * ST0 workers it runs unconditionally (an empty register is itself one of
 * the classifications), hence IEM_MC_REF_FPUREG instead of an emptiness
 * check. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9467
9468
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Requires the register that will become the new top (ST7 relative to the
 * current top) to be empty; otherwise a push overflow is recorded.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9494
9495
/** Opcode 0xd9 0xe8.
 * FLD1: pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
9502
9503
/** Opcode 0xd9 0xe9.
 * FLDL2T: pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
9510
9511
/** Opcode 0xd9 0xea.
 * FLDL2E: pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
9518
/** Opcode 0xd9 0xeb.
 * FLDPI: pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
9525
9526
/** Opcode 0xd9 0xec.
 * FLDLG2: pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
9533
/** Opcode 0xd9 0xed.
 * FLDLN2: pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
9540
9541
/** Opcode 0xd9 0xee.
 * FLDZ: pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
9548
9549
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
9563
9564
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order is reversed relative to iemOpHlpFpu_st0_stN:
 * ST(i) is the first (destination) operand and ST0 the second.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects ST(i).  Callers
 *                      like fyl2x pass a literal stack register index here.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9595
9596
/** Opcode 0xd9 0xf1.
 * FYL2X: ST1 = ST1 * log2(ST0), then pop - via the STn,ST0 pop worker with
 * a fixed stack register index of 1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9603
9604
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * The helper fills an IEMFPURESULTTWO; both values are committed by the
 * two-result push.  Empty ST0 records a push underflow.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9632
9633
/** Opcode 0xd9 0xf2.
 * FPTAN: replaces ST0 with its partial tangent and pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
9640
9641
/** Opcode 0xd9 0xf3.
 * FPATAN: ST1 = arctan(ST1/ST0), then pop - via the STn,ST0 pop worker. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
9648
9649
/** Opcode 0xd9 0xf4.
 * FXTRACT: splits ST0 into exponent (replaces ST0) and significand (pushed). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
9656
9657
/** Opcode 0xd9 0xf5.
 * FPREM1: IEEE partial remainder of ST0/ST1, stored in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9664
9665
9666/** Opcode 0xd9 0xf6. */
9667FNIEMOP_DEF(iemOp_fdecstp)
9668{
9669 IEMOP_MNEMONIC(fdecstp, "fdecstp");
9670 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
9671 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
9672 * FINCSTP and FDECSTP. */
9673 IEM_MC_BEGIN(0, 0, 0, 0);
9674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9675
9676 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9677 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9678
9679 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9680 IEM_MC_FPU_STACK_DEC_TOP();
9681 IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
9682
9683 IEM_MC_ADVANCE_RIP_AND_FINISH();
9684 IEM_MC_END();
9685}
9686
9687
9688/** Opcode 0xd9 0xf7. */
9689FNIEMOP_DEF(iemOp_fincstp)
9690{
9691 IEMOP_MNEMONIC(fincstp, "fincstp");
9692 /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
9693 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
9694 * FINCSTP and FDECSTP. */
9695 IEM_MC_BEGIN(0, 0, 0, 0);
9696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9697
9698 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9699 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9700
9701 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9702 IEM_MC_FPU_STACK_INC_TOP();
9703 IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);
9704
9705 IEM_MC_ADVANCE_RIP_AND_FINISH();
9706 IEM_MC_END();
9707}
9708
9709
9710/** Opcode 0xd9 0xf8. */
9711FNIEMOP_DEF(iemOp_fprem)
9712{
9713 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
9714 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
9715}
9716
9717
9718/** Opcode 0xd9 0xf9. */
9719FNIEMOP_DEF(iemOp_fyl2xp1)
9720{
9721 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
9722 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
9723}
9724
9725
9726/** Opcode 0xd9 0xfa. */
9727FNIEMOP_DEF(iemOp_fsqrt)
9728{
9729 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
9730 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
9731}
9732
9733
9734/** Opcode 0xd9 0xfb. */
9735FNIEMOP_DEF(iemOp_fsincos)
9736{
9737 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
9738 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
9739}
9740
9741
9742/** Opcode 0xd9 0xfc. */
9743FNIEMOP_DEF(iemOp_frndint)
9744{
9745 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
9746 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
9747}
9748
9749
9750/** Opcode 0xd9 0xfd. */
9751FNIEMOP_DEF(iemOp_fscale)
9752{
9753 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
9754 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
9755}
9756
9757
9758/** Opcode 0xd9 0xfe. */
9759FNIEMOP_DEF(iemOp_fsin)
9760{
9761 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
9762 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
9763}
9764
9765
9766/** Opcode 0xd9 0xff. */
9767FNIEMOP_DEF(iemOp_fcos)
9768{
9769 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
9770 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
9771}
9772
9773
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with mod=11 and reg=4..7, i.e. second opcode bytes
 * 0xe0 thru 0xff; indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
9810
9811
/**
 * @opcode      0xd9
 *
 * Escape opcode 0xd9 decoder: dispatches on the ModR/M byte, distinguishing
 * register-form (mod=11) from memory-form encodings.  Also latches the FPU
 * opcode word (FOP) before dispatching.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP = low 3 bits of the opcode byte combined with the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only d9 d0 (fnop) is valid in this group. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg=4..7 <=> bRm 0xe0..0xff, table-dispatched. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9856
9857
/** Opcode 0xda 11/0.
 * fcmovb: copies ST(i) to ST0 when CF is set; underflow if either reg empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/1.
 * fcmove: copies ST(i) to ST0 when ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/2.
 * fcmovbe: copies ST(i) to ST0 when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/3.
 * fcmovu: copies ST(i) to ST0 when PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9960
9961
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * The a-impl only produces an FSW value; no register is written.  On empty
 * ST0/ST1 the underflow path still pops both registers.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,         2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9991
9992
/** Opcode 0xda 0xe9.
 * Unordered compare ST0 vs ST1, then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9999
10000
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Fetches the 32-bit signed integer operand from memory first, then calls the
 * a-impl only if ST0 is non-empty; otherwise raises stack underflow on ST0.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10036
10037
/** Opcode 0xda !11/0.
 * ST0 += m32i (via the st0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1.
 * ST0 *= m32i (via the st0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10052
10053
/** Opcode 0xda !11/2.
 * Compare ST0 with a memory int32; only FSW is updated, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda !11/3.
 * Same as ficom m32i, but pops ST0 afterwards (uses the same a-impl; only
 * the FSW-update/pop macros differ). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10118
10119
/** Opcode 0xda !11/4.
 * ST0 -= m32i (via the st0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5.
 * ST0 = m32i - ST0 (reverse subtract, via the st0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6.
 * ST0 /= m32i (via the st0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7.
 * ST0 = m32i / ST0 (reverse divide, via the st0/m32i worker). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10150
10151
/**
 * @opcode      0xda
 *
 * Escape opcode 0xda decoder: register forms are the fcmov(b/e/be/u) family
 * plus fucompp; memory forms are the m32i integer arithmetic group.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch FOP before dispatching. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only da e9 (fucompp) is valid in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10193
10194
/** Opcode 0xdb !11/0.
 * Load a 32-bit signed integer from memory and push it as an 80-bit real;
 * pushes overflow instead if ST(7) (the incoming slot) is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is where the push will land; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10225
10226
/** Opcode 0xdb !11/1.
 * Store ST0 to m32i with truncation, then pop.  On an empty ST0, stores the
 * integer-indefinite value when the invalid-op exception is masked (FCW.IM)
 * and raises stack underflow either way. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before deciding which path to take. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10260
10261
/** Opcode 0xdb !11/2.
 * Store ST0 to m32i (rounding per FCW); no pop.  Empty-ST0 path mirrors
 * fisttp: integer indefinite if FCW.IM, then stack underflow. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb !11/3.
 * Same as fist m32i, but pops ST0 afterwards (THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10330
10331
/** Opcode 0xdb !11/5.
 * Load an 80-bit real from memory and push it; overflow if ST(7) occupied. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is where the push will land; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10362
10363
/** Opcode 0xdb !11/7.
 * Store ST0 to an 80-bit real in memory, then pop.  Empty-ST0 stores a
 * negative QNaN (real indefinite) when FCW.IM is set, then underflows. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 80-bit store uses the _EX mapping variant with explicit size/alignment. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10397
10398
/** Opcode 0xdb 11/0.
 * fcmovnb: copies ST(i) to ST0 when CF is clear (inverse of 0xda's fcmovb). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty, else stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move is not taken. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb 11/1.
 * fcmovne: copies ST(i) to ST0 when ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb 11/2.
 * fcmovnbe: copies ST(i) to ST0 when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb 11/3.
 * fcmovnu: copies ST(i) to ST0 when PF is clear (not unordered).
 * NOTE(review): historical function name says "nnu"; it implements FCMOVNU. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10501
10502
/** Opcode 0xdb 0xe0.
 * 8087-only interrupt enable; decoded and ignored on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe1.
 * 8087-only interrupt disable; decoded and ignored on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe2.
 * Clears the FSW exception bits (no wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe3.
 * Reinitializes the FPU; deferred to the C implementation (iemCImpl_finit)
 * without exception checking (fCheckXcpts=false, the "no wait" form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}


/** Opcode 0xdb 0xe4.
 * 80287-only; decoded and ignored on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe5.
 * 80287XL-only; we follow newer CPUs and raise \#UD (see \#if 0 branch). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
10577
10578
10579/** Opcode 0xdb 11/5. */
10580FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
10581{
10582 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
10583 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10584 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
10585 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10586}
10587
10588
10589/** Opcode 0xdb 11/6. */
10590FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
10591{
10592 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
10593 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10594 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
10595 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10596}
10597
10598
/**
 * @opcode      0xdb
 *
 * Escape opcode 0xdb decoder: register forms are the fcmovn* family, the
 * fneni..frstpm admin group (reg=4) and fucomi/fcomi; memory forms are the
 * m32i loads/stores plus the 80-bit real load/store.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch FOP before dispatching. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg=4 covers the whole db e0..e7 administrative group. */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - all inner cases return */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10650
10651
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Raises \#NM / pending FPU exceptions first; on an empty source register the
 * stack-underflow path is taken instead of calling the arithmetic helper.
 *
 * @param   bRm         Mod R/M byte (rm selects ST(i), the destination).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operate only if both ST(i) and ST(0) are occupied; otherwise underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10682
10683
/** Opcode 0xdc 11/0 - FADD ST(i),ST(0).
 * Thin wrapper selecting the r80-by-r80 add helper for the generic worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1 - FMUL ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4 - FSUBR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5 - FSUB ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6 - FDIVR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7 - FDIV ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10730
10731
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Calculates the effective address, fetches the m64 operand, and either runs
 * the arithmetic helper or takes the stack-underflow path if ST(0) is empty.
 *
 * @param   bRm         Mod R/M byte (encodes the memory operand).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* The memory fetch happens before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10766
10767
/** Opcode 0xdc !11/0 - FADD m64real.
 * Thin wrapper selecting the r80-by-r64 add helper for the m64r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1 - FMUL m64real. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10782
10783
/** Opcode 0xdc !11/2 - FCOM ST(0),m64real.
 * Compare only: updates FSW condition codes, does not store a result
 * register, hence the open-coded body instead of iemOpHlpFpu_ST0_m64r. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10815
10816
/** Opcode 0xdc !11/3 - FCOMP ST(0),m64real.
 * Same as FCOM m64real, but pops ST(0) afterwards (the _THEN_POP FSW
 * update / underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10848
10849
/** Opcode 0xdc !11/4 - FSUB m64real. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5 - FSUBR m64real. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6 - FDIV m64real. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7 - FDIVR m64real. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10880
10881
/**
 * @opcode 0xdc
 *
 * FPU escape byte 0xdc: register forms operate on ST(i),ST(0); memory forms
 * take a 64-bit real operand.  Reg values 2 and 3 are architecturally
 * reserved in register form but Intel aliases them to FCOM/FCOMP ST(i).
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) value for FSTENV/FSAVE & friends. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10920
10921
/** Opcode 0xdd !11/0 - FLD m64real.
 * Pushes a 64-bit real from memory onto the FPU stack (converted to r80);
 * overflows the stack if ST(7) is occupied.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10952
10953
/** Opcode 0xdd !11/1 - FISTTP m64int.
 * Stores ST(0) as a 64-bit integer with truncation and pops.  If ST(0) is
 * empty and IM is masked, the integer-indefinite value is written instead.
 * (The original comment said !11/0; the EscF5 memory dispatch uses reg==1.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store integer indefinite if \#IA is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10987
10988
/** Opcode 0xdd !11/2 - FST m64real.
 * Stores ST(0) to memory as a 64-bit real without popping.  If ST(0) is
 * empty and IM is masked, a negative QNaN is written instead.
 * (The original comment said !11/0; the EscF5 memory dispatch uses reg==2.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store QNaN (real indefinite) if \#IA is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11022
11023
11024
11025
/** Opcode 0xdd !11/3 - FSTP m64real.
 * Same as FST m64real but pops ST(0) afterwards (the _THEN_POP variants).
 * (The original comment said !11/0; the EscF5 memory dispatch uses reg==3.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store QNaN (real indefinite) if \#IA is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11059
11060
/** Opcode 0xdd !11/4 - FRSTOR m94/108byte.
 * Restores the whole FPU state from memory; deferred to the C implementation.
 * (The original comment said !11/0; the EscF5 memory dispatch uses reg==4.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11077
11078
/** Opcode 0xdd !11/6 - FNSAVE m94/108byte.
 * Saves the whole FPU state to memory; deferred to the C implementation.
 * (The original comment said !11/0; the EscF5 memory dispatch uses reg==6.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11095
/** Opcode 0xdd !11/7 - FNSTSW m16.
 * Stores the FPU status word to a 16-bit memory operand, no-wait form.
 * (The original comment said !11/0; the EscF5 memory dispatch uses reg==7.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11119
11120
/** Opcode 0xdd 11/0 - FFREE ST(i).
 * Marks the tag for ST(i) as empty; only FOP/FIP are updated otherwise. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11140
11141
/** Opcode 0xdd 11/2 - FST ST(i).
 * Copies ST(0) into ST(i) without popping.
 * (The original comment said 11/1; the EscF5 register dispatch uses reg==2.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST(0) value in a result with a zero FSW delta and store. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11164
11165
/** Opcode 0xdd 11/4 - FUCOM ST(0),ST(i).
 * (The original comment said 11/3; the EscF5 register dispatch uses reg==4.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/5 - FUCOMP ST(0),ST(i).
 * (The original comment said 11/4; the EscF5 register dispatch uses reg==5.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11180
11181
/**
 * @opcode 0xdd
 *
 * FPU escape byte 0xdd: register forms cover FFREE/FST/FSTP/FUCOM(P);
 * memory forms cover m64 real/int load/store plus FRSTOR/FNSAVE/FNSTSW.
 * Reg==1 in register form is reserved but Intel aliases it to FXCH ST(i).
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) value for FSTENV/FSAVE & friends. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11220
11221
/** Opcode 0xde 11/0 - FADDP ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1 - FMULP ST(i),ST(0).
 * (The original comment said 11/0; the EscF6 register dispatch uses reg==1.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11236
11237
/** Opcode 0xde 0xd9 - FCOMPP.
 * Compares ST(0) with ST(1) and pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11244
11245
/** Opcode 0xde 11/4 - FSUBRP ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5 - FSUBP ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6 - FDIVRP ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7 - FDIVP ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11276
11277
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Fetches the signed 16-bit integer operand from memory, then either runs
 * the arithmetic helper or takes the stack-underflow path if ST(0) is empty.
 *
 * @param   bRm         Mod R/M byte (encodes the memory operand).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11313
11314
/** Opcode 0xde !11/0 - FIADD m16int. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1 - FIMUL m16int. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11329
11330
/** Opcode 0xde !11/2 - FICOM ST(0),m16int.
 * Compare only: updates FSW condition codes, no result register stored. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11362
11363
/** Opcode 0xde !11/3 - FICOMP ST(0),m16int.
 * Same as FICOM m16int, but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11395
11396
/** Opcode 0xde !11/4 - FISUB m16int. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5 - FISUBR m16int. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}


/** Opcode 0xde !11/6 - FIDIV m16int. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}


/** Opcode 0xde !11/7 - FIDIVR m16int. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11427
11428
/**
 * @opcode 0xde
 *
 * FPU escape byte 0xde: register forms are the pop variants (FADDP etc.),
 * with reg==3 valid only for the 0xd9 encoding (FCOMPP); memory forms take
 * a 16-bit integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) value for FSTENV/FSAVE & friends. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11469
11470
/** Opcode 0xdf 11/0 - FFREEP ST(i).
 * Undocumented instruction, assumed to work like ffree + fincstp:
 * frees ST(i) and then increments TOP. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11490
11491
/** Opcode 0xdf 0xe0: store the FPU status word in AX (no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Read-only access: FSW is fetched, no FPU state is modified. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11506
11507
11508/** Opcode 0xdf 11/5. */
11509FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
11510{
11511 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
11512 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
11513 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11514 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11515}
11516
11517
/** Opcode 0xdf 11/6: FCOMIP - ordered compare ST(0) with ST(i), set EFLAGS,
 *  then pop.  Raises \#IA on QNaN operands (ordered compare). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    /* fUCmp=false selects the ordered compare; bit 31 requests the pop. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11526
11527
/** Opcode 0xdf !11/0: FILD m16i - convert a 16-bit signed integer from
 *  memory to real80 and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int16_t,                 i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,     FpuRes,  0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,     i16Val,  1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push goes into ST(7) relative to the current TOP; it must be empty
       or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11558
11559
/** Opcode 0xdf !11/1: FISTTP m16i - store ST(0) as a 16-bit integer using
 *  truncation (regardless of RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so memory faults are
       raised before any FPU change. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11593
11594
/** Opcode 0xdf !11/2: FIST m16i - store ST(0) as a 16-bit integer using the
 *  current rounding mode; no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so memory faults are
       raised before any FPU change. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* No pop - this is the difference from FISTP. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11628
11629
/** Opcode 0xdf !11/3: FISTP m16i - store ST(0) as a 16-bit integer using the
 *  current rounding mode, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so memory faults are
       raised before any FPU change. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11663
11664
/** Opcode 0xdf !11/4: FBLD m80bcd - convert a packed BCD value from memory
 *  to real80 and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,               d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes, FpuRes,   0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,     pd80Val, d80Val,   1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push goes into ST(7) relative to the current TOP; it must be empty
       or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11695
11696
/** Opcode 0xdf !11/5: FILD m64i - convert a 64-bit signed integer from
 *  memory to real80 and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int64_t,                 i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,     FpuRes,  0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,     i64Val,  1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push goes into ST(7) relative to the current TOP; it must be empty
       or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11727
11728
/** Opcode 0xdf !11/6: FBSTP m80bcd - store ST(0) as a packed BCD value,
 *  then pop. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U,                pd80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Explicit-size mapping: 10-byte BCD value, alignment mask 7. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the BCD indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11762
11763
/** Opcode 0xdf !11/7: FISTP m64i - store ST(0) as a 64-bit integer using the
 *  current rounding mode, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before touching FPU state so memory faults are
       raised before any FPU change. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11797
11798
/**
 * @opcode 0xdf
 *
 * FPU escape byte 0xdf: dispatches on ModRM to the register-form handlers
 * (ffreep, fnstsw ax, fcomip/fucomip and reserved aliases) or the memory
 * forms (16/64-bit integer and packed BCD load/store).
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP register: low 3 bits of 0xdf + ModRM). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* DF E0 is the only valid /4 encoding: FNSTSW AX. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11839
11840
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement (r/e)CX and take the short branch while the
 * counter is non-zero AND ZF is clear.  The counter width is selected by the
 * effective address size, not the operand size.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11891
11892
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrement (r/e)CX and take the short branch while the
 * counter is non-zero AND ZF is set.  The counter width is selected by the
 * effective address size, not the operand size.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11943
11944
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement (r/e)CX and take the short branch while the counter is
 * non-zero.  The counter width is selected by the effective address size.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     *     use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     *     the loop causes guest crashes, but when logging it's nice to skip a few million
     *     lines of useless output. */
#if defined(LOG_ENABLED)
    /* LOOP $-2 (tight self-loop) with verbose logging enabled: zero the
       counter in one go instead of emulating every iteration. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Normal path: decrement the counter and branch while non-zero. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12035
12036
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the short branch if the counter register is
 * zero (no decrement).  The counter width is selected by the effective
 * address size.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Note the inverted test: non-zero falls through, zero jumps. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12084
12085
/** Opcode 0xe4: IN AL,Ib - read one byte from the immediate I/O port into AL. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The 0x80 flag in the last argument marks the port as coming from an
       immediate (affects SVM IOIO intercept decoding). */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12095
12096
/** Opcode 0xe5: IN eAX,Ib - read 2 or 4 bytes (by operand size) from the
 *  immediate I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The 0x80 flag marks the port as coming from an immediate. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12107
12108
/** Opcode 0xe6: OUT Ib,AL - write AL to the immediate I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The 0x80 flag marks the port as coming from an immediate. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12118
12119
/** Opcode 0xe7: OUT Ib,eAX - write AX/EAX (by operand size) to the immediate
 *  I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The 0x80 flag marks the port as coming from an immediate. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12130
12131
/**
 * @opcode 0xe8
 *
 * CALL Jv: near relative call.  The immediate is sign-extended and the
 * operand size selects 16/32/64-bit displacement decoding (64-bit mode uses
 * a sign-extended 32-bit displacement).
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode: disp32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12162
12163
/**
 * @opcode 0xe9
 *
 * JMP Jv: near relative jump with a 16- or 32-bit displacement (64-bit mode
 * shares the sign-extended 32-bit form).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
        case IEMMODE_32BIT: /* 64-bit mode uses the same sign-extended disp32. */
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12193
12194
/**
 * @opcode 0xea
 *
 * JMP Ap: direct far jump with an immediate selector:offset pointer.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg); /* 16-bit offset, zero-extended */
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
12215
12216
/**
 * @opcode 0xeb
 *
 * JMP Jb: short relative jump with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
12231
12232
/** Opcode 0xec: IN AL,DX - read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12241
12242
/** Opcode 0xed: IN eAX,DX - read 2 or 4 bytes (by operand size) from the I/O
 *  port in DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12252
12253
/** Opcode 0xee: OUT DX,AL - write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12262
12263
/** Opcode 0xef: OUT DX,eAX - write AX/EAX (by operand size) to the I/O port
 *  in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12273
12274
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records the prefix (unless the VM is configured to disregard
 * LOCK) and continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Recurse into the one-byte map for the real opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12287
12288
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises \#DB via the generic software-interrupt C worker.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
12304
12305
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records the prefix and continues decoding with the
 * next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    /* Recurse into the one-byte map for the real opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12323
12324
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records the prefix and continues decoding with the next
 * opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    /* Recurse into the one-byte map for the real opcode. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12342
12343
/**
 * @opcode 0xf4
 *
 * HLT: defer to the C implementation; ends the current translation block.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
}
12353
12354
/**
 * @opcode 0xf5
 *
 * CMC: complement the carry flag; no other flags are affected.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12367
12368
/**
 * Body of 'inc/dec/not/neg Eb': unary read-modify-write on an 8-bit
 * register or memory operand.  The two memory paths are identical except
 * that the locked one calls the a_fnLockedU8 worker.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* Same sequence as above, but the locked worker variant. */ \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
12429
12430
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles the register form and the unlocked memory form.  NOTE: this macro
 * deliberately ends inside an open 'else {' (the LOCK-prefixed memory case);
 * it must be followed by IEMOP_BODY_UNARY_Ev_LOCKED, which supplies that
 * branch and the closing braces.
 *
 * @param   a_fnNormalU16   Assembly helper for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly helper for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly helper for the 64-bit operand size.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *,      pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
12554
/**
 * Body for the LOCK-prefixed memory case of 'inc/dec/not/neg Ev'.
 *
 * Completes the 'else' branch left open by IEMOP_BODY_UNARY_Ev and closes
 * its braces; must directly follow that macro.
 *
 * @param   a_fnLockedU16   Atomic assembly helper, 16-bit operand size.
 * @param   a_fnLockedU32   Atomic assembly helper, 32-bit operand size.
 * @param   a_fnLockedU64   Atomic assembly helper, 64-bit operand size.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
12620
12621
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * 'test Eb,Ib' - ANDs the byte operand with an imm8 and sets flags; the
 * destination is only read, never written.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t const *, pu8Dst,             0); /* const: TEST never writes the operand */
        IEM_MC_ARG(uint8_t,         u8Src,              1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* cbImm=1: imm8 follows the ModR/M bytes */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); /* read-only mapping */
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12671
12672
/**
 * Opcode 0xf6 /4, /5, /6 and /7 - common worker for byte mul/imul/div/idiv.
 *
 * The 8-bit forms implicitly operate on AX (AL * r/m8 -> AX, resp.
 * AX / r/m8 -> AL:AH), hence only the AX register is referenced.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   Assembly worker; returns 0 on success, non-zero to
 *                  raise \#DE (divide error / overflow).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* worker signalled #DE */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* worker signalled #DE */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12723
12724
/**
 * Opcode 0xf7 /4, /5, /6 and /7 - common worker for word/dword/qword
 * mul/imul/div/idiv.
 *
 * These forms implicitly use the xDX:xAX register pair (source/destination),
 * so both registers are referenced for every operand size.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table of size-specific assembly workers; each returns 0 on
 *                  success, non-zero to raise \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero bits 63:32 of both implicit registers. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero bits 63:32 of both implicit registers. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12895
12896
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * 'not Eb' - byte one's complement; shares the unary RMW body.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12906
12907
12908/**
12909 * @opmaps grp3_f6
12910 * @opcode /3
12911 */
12912FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12913{
12914 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12915 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12916}
12917
12918
/**
 * @opcode 0xf6
 *
 * Group 3 byte dispatcher: routes on the ModR/M reg field.  Note that /1 is
 * decoded the same as /0 (undocumented alias of 'test Eb,Ib').
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm); /* undocumented alias of /0 */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb,  bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb,  bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12950
12951
/**
 * Opcode 0xf7 /0 - 'test Ev,Iv'.
 *
 * ANDs the word/dword/qword operand with an immediate and sets flags; the
 * destination is only read (RO mapping), never written.  In 64-bit mode the
 * immediate is a sign-extended imm32.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t const *, pu16Dst,           0); /* const: TEST never writes the operand */
                IEM_MC_ARG(uint16_t,         u16Src,            1);
                IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,   2);
                IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* cbImm=2: imm16 follows */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t const *, pu32Dst,           0);
                IEM_MC_ARG(uint32_t,         u32Src,            1);
                IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,   2);
                IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* cbImm=4: imm32 follows */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t const *, pu64Dst,           0);
                IEM_MC_ARG(uint64_t,         u64Src,            1);
                IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,   2);
                IEM_MC_LOCAL(RTGCPTR,                GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* cbImm=4: sign-extended imm32 follows */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13084
13085
/** Opcode 0xf7 /2 - 'not Ev'.  The two body macros below form one statement. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13093
13094
/** Opcode 0xf7 /3 - 'neg Ev'.  The two body macros below form one statement. */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13102
13103
/**
 * @opcode 0xf7
 *
 * Group 3 Ev dispatcher: routes on the ModR/M reg field.  Note that /1 is
 * decoded the same as /0 (undocumented alias of 'test Ev,Iv').
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm); /* undocumented alias of /0 */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev,  bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev,  bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13135
13136
/**
 * @opcode 0xf8
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF); /* CLC: clear the carry flag. */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13149
13150
/**
 * @opcode 0xf9
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF); /* STC: set the carry flag. */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13163
13164
/**
 * @opcode 0xfa
 *
 * CLI is deferred to a C implementation (privilege/VME checks, possible
 * VM-exit); the flags tell the recompiler it touches RFLAGS and that pending
 * interrupts must be checked before the instruction.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
}
13174
13175
/**
 * @opcode 0xfb
 *
 * STI is deferred to a C implementation; interrupts are checked after the
 * instruction and the interrupt-shadow (inhibit) state is raised.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, iemCImpl_sti);
}
13183
13184
/**
 * @opcode 0xfc
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF); /* CLD: clear the direction flag. */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13197
13198
/**
 * @opcode 0xfd
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF); /* STD: set the direction flag. */
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13211
13212
/**
 * @opmaps grp4
 * @opcode /0
 *
 * 'inc Eb' - byte increment (CF unaffected); shares the unary RMW body.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13222
13223
/**
 * @opmaps grp4
 * @opcode /1
 *
 * 'dec Eb' - byte decrement (CF unaffected); shares the unary RMW body.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13233
13234
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (inc) and /1 (dec) are defined; everything
 * else raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13251
/** Opcode 0xff /0 - 'inc Ev'.  The two body macros below form one statement. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13259
13260
/** Opcode 0xff /1 - 'dec Ev'.  The two body macros below form one statement. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13268
13269
/**
 * Opcode 0xff /2 - near indirect call.
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from memory. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13356
/**
 * Common body for the far indirect branch forms of group 5: /3 'callf Ep' and
 * /5 'jmpf Ep' (shared by iemOp_Grp5_callf_Ep and iemOp_Grp5_jmpf_Ep).
 *
 * Register operands are invalid for these encodings and raise \#UD; the
 * sel:off far pointer is always loaded from memory - the offset first, then
 * the 16-bit selector at displacement 2/4/8 matching the effective operand
 * size.
 *
 * @param   a_bRm       The mod R/M byte.
 * @param   a_fnCImpl   The C implementation worker taking (u16Sel, offSeg,
 *                      enmEffOpSize), i.e. iemCImpl_callf or iemCImpl_FarJmp.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            /* 16-bit offset at disp 0, selector word at disp 2. */ \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            /* 32-bit offset at disp 0, selector word at disp 4. */ \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            /* Only reachable on Intel, see the REX.W handling above. */ \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            /* 64-bit offset at disp 0, selector word at disp 8. */ \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13424
13425
/**
 * Opcode 0xff /3 - callf Ep.
 *
 * Far indirect call through a sel:off far pointer in memory; the actual
 * decoding and the call to iemCImpl_callf are done by
 * IEMOP_BODY_GRP5_FAR_EP (register operands raise \#UD there).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
}
13435
13436
/**
 * Opcode 0xff /4 - jmpn Ev.
 *
 * Near indirect jump: the new RIP/EIP/IP is taken either from a general
 * register or from a memory operand, depending on the mod R/M byte.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    /* 64-bit code defaults to 64-bit operand size; Intel also ignores a 66h prefix here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13523
13524
/**
 * Opcode 0xff /5 - jmpf Ep.
 *
 * Far indirect jump through a sel:off far pointer in memory; the actual
 * decoding and the call to iemCImpl_FarJmp are done by
 * IEMOP_BODY_GRP5_FAR_EP (register operands raise \#UD there).
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
}
13534
13535
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Pushes a word/dword/qword general register or memory operand.  Register
 * operands are forwarded to the common worker iemOpCommonPushGReg; memory
 * operands are fetched and pushed here.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* push defaults to 64-bit operand size in 64-bit code */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit operand size is not encodable in 64-bit mode, hence NOT_64BIT. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13591
13592
13593/**
13594 * @opcode 0xff
13595 */
13596FNIEMOP_DEF(iemOp_Grp5)
13597{
13598 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13599 switch (IEM_GET_MODRM_REG_8(bRm))
13600 {
13601 case 0:
13602 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13603 case 1:
13604 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13605 case 2:
13606 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13607 case 3:
13608 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13609 case 4:
13610 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13611 case 5:
13612 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13613 case 6:
13614 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13615 case 7:
13616 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13617 IEMOP_RAISE_INVALID_OPCODE_RET();
13618 }
13619 AssertFailedReturn(VERR_IEM_IPE_3);
13620}
13621
13622
13623
/**
 * The one byte opcode dispatch table, indexed by the opcode byte
 * (0x00 thru 0xff).  Forward declared as extern at the top of the file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
13691
13692
13693/** @} */
13694
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette