VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 100804

Last change on this file since 100804 was 100804, checked in by vboxsync, 21 months ago

VMM/IEM: Made the rep-prefixed string instructions return new status code VINF_IEM_YIELD_PENDING_FF if they yield and does not update RIP. This will avoid trouble in the recompiler as any non-zero status code will stop TB execution. This is more efficient than generating extra checks on the RIP value or something in the TB. The IEM_CIMPL_F_REP annotated instructions no longer need to trigger an end-of-tb. Annotate I/O instructions in case it comes in handy. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 459.6 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 100804 2023-08-05 01:01:32Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
62 *
63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
64 */
#define IEMOP_BODY_BINARY_rm_r8(a_fnNormalU8, a_fRW) \
    /* Note! Deliberately leaves the memory/LOCK 'else' scope open; it MUST be \
     *       closed by a following IEMOP_BODY_BINARY_rm_r8_NO_LOCK() or \
     *       IEMOP_BODY_BINARY_rm_r8_LOCKED() invocation. */ \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefixed: continued by the _NO_LOCK/_LOCKED companion macro. */ \
            (void)0
117
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
    /* Tail for IEMOP_BODY_BINARY_rm_r8 when the instruction does not support \
     *   the LOCK prefix: raises #UD.  Closes the two scopes left open above. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
124
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
    /* Tail for IEMOP_BODY_BINARY_rm_r8: LOCK-prefixed memory variant using the \
     *   atomic worker a_fnLockedU8.  Always maps read-write (locked ops write). \
     *   Closes the two scopes left open by IEMOP_BODY_BINARY_rm_r8. */ \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t, u8Src, 1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
146
147/**
148 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
149 * destination.
150 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    /* Register destination means memory is only ever read, so no LOCK variant \
     *   and no open-scope continuation trick is needed here. */ \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint8_t, u8Src, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
195
196
197/**
198 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
199 * memory/register as the destination.
200 */
#define IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    /* Note! Deliberately leaves the memory/LOCK 'else' scope open; it MUST be \
     *       closed by a following IEMOP_BODY_BINARY_rm_rv_NO_LOCK() or \
     *       IEMOP_BODY_BINARY_rm_rv_LOCKED() invocation. \
     *   a_fRW: IEM_ACCESS_DATA_RW for ops that write the destination, \
     *          IEM_ACCESS_DATA_R for TEST/CMP style ops. */ \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t, u16Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t, u32Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* Only zero-extend to 64-bit when the destination is actually written \
                 *   (i.e. not TEST and CMP, which pass IEM_ACCESS_DATA_R). */ \
                if ((a_fRW) == IEM_ACCESS_DATA_RW) /* not TEST and CMP */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t, u64Src, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefixed: continued by the _NO_LOCK/_LOCKED companion macro. */ \
            (void)0
341
#define IEMOP_BODY_BINARY_rm_rv_NO_LOCK() \
    /* Tail for IEMOP_BODY_BINARY_rm_rv when the instruction does not support \
     *   the LOCK prefix: raises #UD.  Closes the two scopes left open above. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
348
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
    /* Tail for IEMOP_BODY_BINARY_rm_rv: LOCK-prefixed memory variants using the \
     *   atomic workers.  Always maps IEM_ACCESS_DATA_RW since locked ops write. \
     *   Closes the two scopes left open by IEMOP_BODY_BINARY_rm_rv. */ \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW /* locked ops always write; NOTE(review): old 'CMP,TEST' note here looked stale */); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
417
418
419/**
420 * Body for instructions like ADD, AND, OR, ++ with working on AL with
421 * a byte immediate.
422 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    /* AL <op>= imm8; the LOCK prefix is rejected by the decoding helper below. */ \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
    IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
438
439/**
440 * Body for instructions like ADD, AND, OR, ++ with working on
441 * AX/EAX/RAX with a word/dword immediate.
442 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    /* rAX <op>= Iz (imm16/imm32/sign-extended imm32 for 64-bit).  The cases have \
     *   no 'break': NOTE(review) presumably IEM_MC_ADVANCE_RIP_AND_FINISH() \
     *   returns, so control never reaches the next case — confirm vs IEM_MC defs. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            /* Zero-extend EAX to RAX only when the op writes its destination \
             *   (skipped for TEST/CMP, which pass a_fModifiesDstReg = 0). */ \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
            IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
            IEM_MC_ARG(uint32_t *, pEFlags, 2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
505
506
507
508/* Instruction specification format - work in progress: */
509
510/**
511 * @opcode 0x00
512 * @opmnemonic add
513 * @op1 rm:Eb
514 * @op2 reg:Gb
515 * @opmaps one
516 * @openc ModR/M
517 * @opflmodify cf,pf,af,zf,sf,of
518 * @ophints harmless ignores_op_sizes
519 * @opstats add_Eb_Gb
520 * @opgroup og_gen_arith_bin
521 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
522 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
523 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
524 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
525 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* ADD r/m8, r8 — the rm_r8 body opens the LOCK path; _LOCKED closes it. */
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
532
533
534/**
535 * @opcode 0x01
536 * @opgroup og_gen_arith_bin
537 * @opflmodify cf,pf,af,zf,sf,of
538 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
539 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
540 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
541 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
542 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* ADD r/m16/32/64, r16/32/64 — rm_rv body + mandatory _LOCKED tail. */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
549
550
551/**
552 * @opcode 0x02
553 * @opgroup og_gen_arith_bin
554 * @opflmodify cf,pf,af,zf,sf,of
555 * @opcopytests iemOp_add_Eb_Gb
556 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* ADD r8, r/m8 — register destination, so no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
562
563
564/**
565 * @opcode 0x03
566 * @opgroup og_gen_arith_bin
567 * @opflmodify cf,pf,af,zf,sf,of
568 * @opcopytests iemOp_add_Ev_Gv
569 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* ADD r16/32/64, r/m16/32/64; final 1 = writes destination (zero-extends in 32-bit). */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
575
576
577/**
578 * @opcode 0x04
579 * @opgroup og_gen_arith_bin
580 * @opflmodify cf,pf,af,zf,sf,of
581 * @opcopytests iemOp_add_Eb_Gb
582 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* ADD AL, imm8. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
588
589
590/**
591 * @opcode 0x05
592 * @opgroup og_gen_arith_bin
593 * @opflmodify cf,pf,af,zf,sf,of
594 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
595 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
596 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
597 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
598 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* ADD rAX, Iz; final 1 = modifies destination register. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
604
605
606/**
607 * @opcode 0x06
608 * @opgroup og_stack_sreg
609 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    /* PUSH ES — invalid in 64-bit mode; shared segment-push helper does the work. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
616
617
618/**
619 * @opcode 0x07
620 * @opgroup og_stack_sreg
621 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    /* POP ES — invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* IEM_CIMPL_F_MODE: loading a segment register may affect mode-dependent state. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
629
630
631/**
632 * @opcode 0x08
633 * @opgroup og_gen_arith_bin
634 * @opflmodify cf,pf,af,zf,sf,of
635 * @opflundef af
636 * @opflclear of,cf
637 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
638 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
639 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
640 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
641 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* OR r/m8, r8 — AF is architecturally undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
649
650
/**
652 * @opcode 0x09
653 * @opgroup og_gen_arith_bin
654 * @opflmodify cf,pf,af,zf,sf,of
655 * @opflundef af
656 * @opflclear of,cf
657 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
658 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
659 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
660 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
661 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
662 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
663 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* OR r/m16/32/64, r16/32/64 — AF undefined after OR. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
671
672
673/**
674 * @opcode 0x0a
675 * @opgroup og_gen_arith_bin
676 * @opflmodify cf,pf,af,zf,sf,of
677 * @opflundef af
678 * @opflclear of,cf
679 * @opcopytests iemOp_or_Eb_Gb
680 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* OR r8, r/m8 — register destination; AF undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
687
688
689/**
690 * @opcode 0x0b
691 * @opgroup og_gen_arith_bin
692 * @opflmodify cf,pf,af,zf,sf,of
693 * @opflundef af
694 * @opflclear of,cf
695 * @opcopytests iemOp_or_Ev_Gv
696 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* OR r16/32/64, r/m16/32/64 — AF undefined; final 1 = writes destination. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
703
704
705/**
706 * @opcode 0x0c
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 * @opflundef af
710 * @opflclear of,cf
711 * @opcopytests iemOp_or_Eb_Gb
712 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* OR AL, imm8 — AF undefined. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
719
720
721/**
722 * @opcode 0x0d
723 * @opgroup og_gen_arith_bin
724 * @opflmodify cf,pf,af,zf,sf,of
725 * @opflundef af
726 * @opflclear of,cf
727 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
728 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
729 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
730 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
731 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
732 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
733 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
734 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* OR rAX, Iz — AF undefined; final 1 = modifies destination register. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
741
742
743/**
744 * @opcode 0x0e
745 * @opgroup og_stack_sreg
746 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    /* PUSH CS — invalid in 64-bit mode; shared segment-push helper. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
753
754
755/**
756 * @opcode 0x0f
757 * @opmnemonic EscTwo0f
758 * @openc two0f
759 * @opdisenum OP_2B_ESC
760 * @ophints harmless
761 * @opgroup og_escapes
762 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* 286 and later: dispatch through the two-byte opcode map (4 entries per
       opcode, indexed by the operand-size/repeat prefix group idxPrefix). */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
798
799/**
800 * @opcode 0x10
801 * @opgroup og_gen_arith_bin
802 * @opfltest cf
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
805 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
806 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
808 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
809 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* ADC r/m8, r8 — add with carry-in; rm_r8 body + mandatory _LOCKED tail. */
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
816
817
818/**
819 * @opcode 0x11
820 * @opgroup og_gen_arith_bin
821 * @opfltest cf
822 * @opflmodify cf,pf,af,zf,sf,of
823 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
824 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
825 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
826 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
827 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
828 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* ADC r/m16/32/64, r16/32/64 — rm_rv body + mandatory _LOCKED tail. */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
835
836
837/**
838 * @opcode 0x12
839 * @opgroup og_gen_arith_bin
840 * @opfltest cf
841 * @opflmodify cf,pf,af,zf,sf,of
842 * @opcopytests iemOp_adc_Eb_Gb
843 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* ADC r8, r/m8 — register destination, no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
849
850
851/**
852 * @opcode 0x13
853 * @opgroup og_gen_arith_bin
854 * @opfltest cf
855 * @opflmodify cf,pf,af,zf,sf,of
856 * @opcopytests iemOp_adc_Ev_Gv
857 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* ADC r16/32/64, r/m16/32/64; final 1 = writes destination. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
863
864
865/**
866 * @opcode 0x14
867 * @opgroup og_gen_arith_bin
868 * @opfltest cf
869 * @opflmodify cf,pf,af,zf,sf,of
870 * @opcopytests iemOp_adc_Eb_Gb
871 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* ADC AL, imm8. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
877
878
879/**
880 * @opcode 0x15
881 * @opgroup og_gen_arith_bin
882 * @opfltest cf
883 * @opflmodify cf,pf,af,zf,sf,of
884 * @opcopytests iemOp_adc_Ev_Gv
885 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* ADC rAX, Iz; final 1 = modifies destination register. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
891
892
893/**
894 * @opcode 0x16
895 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    /* PUSH SS — invalid in 64-bit mode; shared segment-push helper. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
902
903
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS — invalid in 64-bit mode; DISOPTYPE_INHIBIT_IRQS: interrupts are
       inhibited for one instruction after loading SS. Deferred to the C impl. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
917
918
919/**
920 * @opcode 0x18
921 * @opgroup og_gen_arith_bin
922 * @opfltest cf
923 * @opflmodify cf,pf,af,zf,sf,of
924 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* SBB r/m8, r8 — subtract with borrow; rm_r8 body + mandatory _LOCKED tail. */
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
931
932
933/**
934 * @opcode 0x19
935 * @opgroup og_gen_arith_bin
936 * @opfltest cf
937 * @opflmodify cf,pf,af,zf,sf,of
938 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* SBB r/m16/32/64, r16/32/64 — rm_rv body + mandatory _LOCKED tail. */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
945
946
947/**
948 * @opcode 0x1a
949 * @opgroup og_gen_arith_bin
950 * @opfltest cf
951 * @opflmodify cf,pf,af,zf,sf,of
952 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* SBB r8, r/m8 — register destination, no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
958
959
/**
 * @opcode 0x1b
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64 - register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
971
972
/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8 - fixed 8-bit operand, operand-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
984
985
/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, immz - operand size selects AX/EAX/RAX and the worker. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
997
998
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode; defers to the common sreg push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1009
1010
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; deferred to C since loading DS can change mode state. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1022
1023
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 - AF is architecturally undefined; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1038
1039
/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64 - AF undefined; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1054
1055
/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 - register destination, no LOCK variant; AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1069
1070
/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 - register destination, no LOCK variant; AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1084
1085
1086/**
1087 * @opcode 0x24
1088 * @opgroup og_gen_arith_bin
1089 * @opflmodify cf,pf,af,zf,sf,of
1090 * @opflundef af
1091 * @opflclear of,cf
1092 */
1093FNIEMOP_DEF(iemOp_and_Al_Ib)
1094{
1095 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1096 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1097 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1098}
1099
1100
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, immz - operand size selects AX/EAX/RAX; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1114
1115
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Segment-override prefix: record ES as effective segment, then decode
       the next opcode byte thru the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1134
1135
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       Deferred to C implementation; OF is architecturally undefined. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1150
1151
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 - LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1163
1164
/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64 - LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1176
1177
/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 - register destination, no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1188
1189
/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 - register destination, no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1200
1201
/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8 - fixed 8-bit operand, operand-size prefixes irrelevant. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1212
1213
/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, immz - operand size selects AX/EAX/RAX. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1224
1225
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* Segment-override prefix: record CS as effective segment, then decode
       the next opcode byte thru the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1244
1245
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
       Deferred to C implementation; OF is architecturally undefined. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1260
1261
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 - AF undefined; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1276
1277
/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64 - AF undefined; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1292
1293
/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 - register destination, no LOCK variant; AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1307
1308
/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 - register destination, no LOCK variant; AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1322
1323
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8 - fixed 8-bit operand, operand-size prefixes irrelevant; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1337
1338
1339/**
1340 * @opcode 0x35
1341 * @opgroup og_gen_arith_bin
1342 * @opflmodify cf,pf,af,zf,sf,of
1343 * @opflundef af
1344 * @opflclear of,cf
1345 */
1346FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1347{
1348 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1349 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1350 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1351}
1352
1353
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* Segment-override prefix: record SS as effective segment, then decode
       the next opcode byte thru the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1372
1373
1374/**
1375 * @opcode 0x37
1376 * @opfltest af,cf
1377 * @opflmodify cf,pf,af,zf,sf,of
1378 * @opflundef pf,zf,sf,of
1379 * @opgroup og_gen_arith_dec
1380 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1381 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1382 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1383 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1384 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1385 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1386 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1387 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1388 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1389 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1390 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1391 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1392 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1393 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1394 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1395 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1396 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1397 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1398 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1399 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1400 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1401 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1402 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1403 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1404 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1405 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1406 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1407 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1408 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1409 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1410 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1411 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode.
       Deferred to C implementation; OF is architecturally undefined
       (see the @opflundef/@optest table above for vendor differences). */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1421
1422
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - read-only access (no writeback), so no LOCK variant. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1432
1433
/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 - read-only access, no LOCK variant. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
1443
1444
/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1453
1454
/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64 - final arg 0: no register writeback (compare only). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1463
1464
/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1473
1474
/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, immz - final arg 0: no register writeback (compare only). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1483
1484
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* Segment-override prefix: record DS as effective segment, then decode
       the next opcode byte thru the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1497
1498
1499/**
1500 * @opcode 0x3f
1501 * @opfltest af,cf
1502 * @opflmodify cf,pf,af,zf,sf,of
1503 * @opflundef pf,zf,sf,of
1504 * @opgroup og_gen_arith_dec
1505 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1506 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1507 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1508 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1509 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1510 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1511 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1512 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1513 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1514 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1515 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1516 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1517 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1518 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1519 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1520 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1521 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1522 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1523 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1524 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1525 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1526 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1527 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1528 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1529 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1530 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1531 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1532 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1533 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1534 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1535 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1536 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1537 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1538 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1539 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1540 */
1541FNIEMOP_DEF(iemOp_aas)
1542{
1543 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1544 IEMOP_HLP_NO_64BIT();
1545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1546 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1547
1548 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1549}
1550
1551
/**
 * Common 'inc/dec register' body for opcodes 0x40-0x4f outside 64-bit mode.
 *
 * Dispatches on the effective operand size and emits the 16-bit or 32-bit
 * microcode sequence (reference register + eflags, call worker, advance RIP).
 * The 32-bit case explicitly clears the high half of the 64-bit register.
 *
 * Not for 64-bit code, only for what became the rex prefixes there.
 *
 * @param a_fnNormalU16 The 16-bit assembly worker (value ptr, eflags ptr).
 * @param a_fnNormalU32 The 32-bit assembly worker (value ptr, eflags ptr).
 * @param a_iReg        The general purpose register index (X86_GREG_xXX).
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1587
/**
 * @opcode 0x40
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* The prefix consumed this byte; decode the next opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1608
1609
/**
 * @opcode 0x41
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B supplies bit 3 of the base/opcode register. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1631
1632
/**
 * @opcode 0x42
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X supplies bit 3 of the SIB index register. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1654
1655
1656
/**
 * @opcode 0x43
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1679
1680
/**
 * @opcode 0x44
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R supplies bit 3 of the ModRM reg field. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1702
1703
/**
 * @opcode 0x45
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1726
1727
/**
 * @opcode 0x46
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1750
1751
/**
 * @opcode 0x47
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1775
1776
/**
 * @opcode 0x48
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W affects the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
1798
1799
/**
 * @opcode 0x49
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W affects the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
1822
1823
/**
 * @opcode 0x4a
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W affects the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
1846
1847
/**
 * @opcode 0x4b
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W affects the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
1871
1872
/**
 * @opcode 0x4c
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W affects the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
1895
1896
/**
 * @opcode 0x4d
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W affects the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
1920
1921
/**
 * @opcode 0x4e
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W affects the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
1945
1946
/**
 * @opcode 0x4f
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W affects the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
1971
1972
/**
 * Common 'push register' helper for opcodes 0x50-0x57.
 *
 * In 64-bit mode the register index is extended with REX.B, the default
 * operand size is 64-bit, and 0x66 selects 16-bit (there is no 32-bit push
 * in 64-bit mode).  Otherwise dispatches on the effective operand size.
 *
 * @param iReg  The general purpose register index (X86_GREG_xXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2020
2021
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX - register index extended by REX.B in the common helper. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2030
2031
2032/**
2033 * @opcode 0x51
2034 */
2035FNIEMOP_DEF(iemOp_push_eCX)
2036{
2037 IEMOP_MNEMONIC(push_rCX, "push rCX");
2038 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2039}
2040
2041
2042/**
2043 * @opcode 0x52
2044 */
2045FNIEMOP_DEF(iemOp_push_eDX)
2046{
2047 IEMOP_MNEMONIC(push_rDX, "push rDX");
2048 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2049}
2050
2051
2052/**
2053 * @opcode 0x53
2054 */
2055FNIEMOP_DEF(iemOp_push_eBX)
2056{
2057 IEMOP_MNEMONIC(push_rBX, "push rBX");
2058 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2059}
2060
2061
2062/**
2063 * @opcode 0x54
2064 */
2065FNIEMOP_DEF(iemOp_push_eSP)
2066{
2067 IEMOP_MNEMONIC(push_rSP, "push rSP");
2068 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2069 {
2070 IEM_MC_BEGIN(0, 1);
2071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2072 IEM_MC_LOCAL(uint16_t, u16Value);
2073 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2074 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
2075 IEM_MC_PUSH_U16(u16Value);
2076 IEM_MC_ADVANCE_RIP_AND_FINISH();
2077 IEM_MC_END();
2078 }
2079 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2080}
2081
2082
2083/**
2084 * @opcode 0x55
2085 */
2086FNIEMOP_DEF(iemOp_push_eBP)
2087{
2088 IEMOP_MNEMONIC(push_rBP, "push rBP");
2089 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2090}
2091
2092
2093/**
2094 * @opcode 0x56
2095 */
2096FNIEMOP_DEF(iemOp_push_eSI)
2097{
2098 IEMOP_MNEMONIC(push_rSI, "push rSI");
2099 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2100}
2101
2102
2103/**
2104 * @opcode 0x57
2105 */
2106FNIEMOP_DEF(iemOp_push_eDI)
2107{
2108 IEMOP_MNEMONIC(push_rDI, "push rDI");
2109 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2110}
2111
2112
2113/**
2114 * Common 'pop register' helper.
2115 */
2116FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2117{
2118 if (IEM_IS_64BIT_CODE(pVCpu))
2119 {
2120 iReg |= pVCpu->iem.s.uRexB;
2121 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2122 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2123 }
2124
2125 switch (pVCpu->iem.s.enmEffOpSize)
2126 {
2127 case IEMMODE_16BIT:
2128 IEM_MC_BEGIN(0, 1);
2129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2130 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2131 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2132 IEM_MC_POP_U16(pu16Dst);
2133 IEM_MC_ADVANCE_RIP_AND_FINISH();
2134 IEM_MC_END();
2135 break;
2136
2137 case IEMMODE_32BIT:
2138 IEM_MC_BEGIN(0, 1);
2139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2140 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2141 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2142 IEM_MC_POP_U32(pu32Dst);
2143 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
2144 IEM_MC_ADVANCE_RIP_AND_FINISH();
2145 IEM_MC_END();
2146 break;
2147
2148 case IEMMODE_64BIT:
2149 IEM_MC_BEGIN(0, 1);
2150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2151 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2152 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2153 IEM_MC_POP_U64(pu64Dst);
2154 IEM_MC_ADVANCE_RIP_AND_FINISH();
2155 IEM_MC_END();
2156 break;
2157
2158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2159 }
2160}
2161
2162
2163/**
2164 * @opcode 0x58
2165 */
2166FNIEMOP_DEF(iemOp_pop_eAX)
2167{
2168 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2169 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2170}
2171
2172
2173/**
2174 * @opcode 0x59
2175 */
2176FNIEMOP_DEF(iemOp_pop_eCX)
2177{
2178 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2179 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2180}
2181
2182
2183/**
2184 * @opcode 0x5a
2185 */
2186FNIEMOP_DEF(iemOp_pop_eDX)
2187{
2188 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2189 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2190}
2191
2192
2193/**
2194 * @opcode 0x5b
2195 */
2196FNIEMOP_DEF(iemOp_pop_eBX)
2197{
2198 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2199 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2200}
2201
2202
2203/**
2204 * @opcode 0x5c
2205 */
2206FNIEMOP_DEF(iemOp_pop_eSP)
2207{
2208 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2209 if (IEM_IS_64BIT_CODE(pVCpu))
2210 {
2211 if (pVCpu->iem.s.uRexB)
2212 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2213 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2214 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2215 }
2216
2217 /** @todo add testcase for this instruction. */
2218 switch (pVCpu->iem.s.enmEffOpSize)
2219 {
2220 case IEMMODE_16BIT:
2221 IEM_MC_BEGIN(0, 1);
2222 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2223 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2224 IEM_MC_LOCAL(uint16_t, u16Dst);
2225 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
2226 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2227 IEM_MC_ADVANCE_RIP_AND_FINISH();
2228 IEM_MC_END();
2229 break;
2230
2231 case IEMMODE_32BIT:
2232 IEM_MC_BEGIN(0, 1);
2233 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2234 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2235 IEM_MC_LOCAL(uint32_t, u32Dst);
2236 IEM_MC_POP_U32(&u32Dst);
2237 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2238 IEM_MC_ADVANCE_RIP_AND_FINISH();
2239 IEM_MC_END();
2240 break;
2241
2242 case IEMMODE_64BIT:
2243 IEM_MC_BEGIN(0, 1);
2244 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2245 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2246 IEM_MC_LOCAL(uint64_t, u64Dst);
2247 IEM_MC_POP_U64(&u64Dst);
2248 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2249 IEM_MC_ADVANCE_RIP_AND_FINISH();
2250 IEM_MC_END();
2251 break;
2252
2253 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2254 }
2255}
2256
2257
2258/**
2259 * @opcode 0x5d
2260 */
2261FNIEMOP_DEF(iemOp_pop_eBP)
2262{
2263 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2264 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2265}
2266
2267
2268/**
2269 * @opcode 0x5e
2270 */
2271FNIEMOP_DEF(iemOp_pop_eSI)
2272{
2273 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2274 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2275}
2276
2277
2278/**
2279 * @opcode 0x5f
2280 */
2281FNIEMOP_DEF(iemOp_pop_eDI)
2282{
2283 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2284 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2285}
2286
2287
2288/**
2289 * @opcode 0x60
2290 */
2291FNIEMOP_DEF(iemOp_pusha)
2292{
2293 IEMOP_MNEMONIC(pusha, "pusha");
2294 IEMOP_HLP_MIN_186();
2295 IEMOP_HLP_NO_64BIT();
2296 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2297 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
2298 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2299 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
2300}
2301
2302
2303/**
2304 * @opcode 0x61
2305 */
2306FNIEMOP_DEF(iemOp_popa__mvex)
2307{
2308 if (!IEM_IS_64BIT_CODE(pVCpu))
2309 {
2310 IEMOP_MNEMONIC(popa, "popa");
2311 IEMOP_HLP_MIN_186();
2312 IEMOP_HLP_NO_64BIT();
2313 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2314 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
2315 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2316 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
2317 }
2318 IEMOP_MNEMONIC(mvex, "mvex");
2319 Log(("mvex prefix is not supported!\n"));
2320 IEMOP_RAISE_INVALID_OPCODE_RET();
2321}
2322
2323
2324/**
2325 * @opcode 0x62
2326 * @opmnemonic bound
2327 * @op1 Gv_RO
2328 * @op2 Ma
2329 * @opmincpu 80186
2330 * @ophints harmless x86_invalid_64
2331 * @optest op1=0 op2=0 ->
2332 * @optest op1=1 op2=0 -> value.xcpt=5
2333 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2334 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2335 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2336 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2337 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2338 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2339 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2340 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2341 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2342 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2343 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2344 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2345 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2346 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2347 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2348 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2349 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2350 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2351 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2352 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2353 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2354 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2355 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2356 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2357 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2358 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2359 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2360 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2361 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2362 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2363 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2364 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2365 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2366 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2367 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2368 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2369 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2370 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2371 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2372 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2373 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2374 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2375 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Bounds pair is read from memory: lower at disp 0, upper at disp 2. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Bounds pair is read from memory: lower at disp 0, upper at disp 4. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix decoding: consume the remaining two payload bytes, then
       fail since EVEX instructions are not implemented here yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2463
2464
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjust the RPL field of the destination selector word.
 * 80286+, invalid in real and V8086 mode; the flag/RPL logic lives in the
 * iemAImpl_arpl assembly worker. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: destination word is mapped read-write and committed after
           the worker has run. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2512
2513
2514/**
2515 * @opcode 0x63
2516 *
2517 * @note This is a weird one. It works like a regular move instruction if
2518 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2519 * @todo This definitely needs a testcase to verify the odd cases. */
2520FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2521{
2522 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
2523
2524 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2526
2527 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2528 {
2529 if (IEM_IS_MODRM_REG_MODE(bRm))
2530 {
2531 /*
2532 * Register to register.
2533 */
2534 IEM_MC_BEGIN(0, 1);
2535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2536 IEM_MC_LOCAL(uint64_t, u64Value);
2537 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2538 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2539 IEM_MC_ADVANCE_RIP_AND_FINISH();
2540 IEM_MC_END();
2541 }
2542 else
2543 {
2544 /*
2545 * We're loading a register from memory.
2546 */
2547 IEM_MC_BEGIN(0, 2);
2548 IEM_MC_LOCAL(uint64_t, u64Value);
2549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2552 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2553 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2554 IEM_MC_ADVANCE_RIP_AND_FINISH();
2555 IEM_MC_END();
2556 }
2557 }
2558 else
2559 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2560}
2561
2562
2563/**
2564 * @opcode 0x64
2565 * @opmnemonic segfs
2566 * @opmincpu 80386
2567 * @opgroup og_prefixes
2568 */
2569FNIEMOP_DEF(iemOp_seg_FS)
2570{
2571 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2572 IEMOP_HLP_MIN_386();
2573
2574 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2575 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2576
2577 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2578 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2579}
2580
2581
2582/**
2583 * @opcode 0x65
2584 * @opmnemonic seggs
2585 * @opmincpu 80386
2586 * @opgroup og_prefixes
2587 */
2588FNIEMOP_DEF(iemOp_seg_GS)
2589{
2590 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2591 IEMOP_HLP_MIN_386();
2592
2593 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2594 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2595
2596 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2597 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2598}
2599
2600
2601/**
2602 * @opcode 0x66
2603 * @opmnemonic opsize
2604 * @openc prefix
2605 * @opmincpu 80386
2606 * @ophints harmless
2607 * @opgroup og_prefixes
2608 */
2609FNIEMOP_DEF(iemOp_op_size)
2610{
2611 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2612 IEMOP_HLP_MIN_386();
2613
2614 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2615 iemRecalEffOpSize(pVCpu);
2616
2617 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2618 when REPZ or REPNZ are present. */
2619 if (pVCpu->iem.s.idxPrefix == 0)
2620 pVCpu->iem.s.idxPrefix = 1;
2621
2622 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2623 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2624}
2625
2626
2627/**
2628 * @opcode 0x67
2629 * @opmnemonic addrsize
2630 * @openc prefix
2631 * @opmincpu 80386
2632 * @ophints harmless
2633 * @opgroup og_prefixes
2634 */
2635FNIEMOP_DEF(iemOp_addr_size)
2636{
2637 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2638 IEMOP_HLP_MIN_386();
2639
2640 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2641 switch (pVCpu->iem.s.enmDefAddrMode)
2642 {
2643 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2644 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2645 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2646 default: AssertFailed();
2647 }
2648
2649 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2650 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2651}
2652
2653
2654/**
2655 * @opcode 0x68
2656 */
2657FNIEMOP_DEF(iemOp_push_Iz)
2658{
2659 IEMOP_MNEMONIC(push_Iz, "push Iz");
2660 IEMOP_HLP_MIN_186();
2661 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2662 switch (pVCpu->iem.s.enmEffOpSize)
2663 {
2664 case IEMMODE_16BIT:
2665 {
2666 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2667 IEM_MC_BEGIN(0,0);
2668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2669 IEM_MC_PUSH_U16(u16Imm);
2670 IEM_MC_ADVANCE_RIP_AND_FINISH();
2671 IEM_MC_END();
2672 break;
2673 }
2674
2675 case IEMMODE_32BIT:
2676 {
2677 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2678 IEM_MC_BEGIN(0,0);
2679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2680 IEM_MC_PUSH_U32(u32Imm);
2681 IEM_MC_ADVANCE_RIP_AND_FINISH();
2682 IEM_MC_END();
2683 break;
2684 }
2685
2686 case IEMMODE_64BIT:
2687 {
2688 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2689 IEM_MC_BEGIN(0,0);
2690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2691 IEM_MC_PUSH_U64(u64Imm);
2692 IEM_MC_ADVANCE_RIP_AND_FINISH();
2693 IEM_MC_END();
2694 break;
2695 }
2696
2697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2698 }
2699}
2700
2701
2702/**
2703 * @opcode 0x69
2704 */
2705FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2706{
2707 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2708 IEMOP_HLP_MIN_186();
2709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2710 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2711
2712 switch (pVCpu->iem.s.enmEffOpSize)
2713 {
2714 case IEMMODE_16BIT:
2715 {
2716 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2717 if (IEM_IS_MODRM_REG_MODE(bRm))
2718 {
2719 /* register operand */
2720 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2721 IEM_MC_BEGIN(3, 1);
2722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2723 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2724 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2725 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2726 IEM_MC_LOCAL(uint16_t, u16Tmp);
2727
2728 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2729 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2730 IEM_MC_REF_EFLAGS(pEFlags);
2731 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2732 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2733
2734 IEM_MC_ADVANCE_RIP_AND_FINISH();
2735 IEM_MC_END();
2736 }
2737 else
2738 {
2739 /* memory operand */
2740 IEM_MC_BEGIN(3, 2);
2741 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2742 IEM_MC_ARG(uint16_t, u16Src, 1);
2743 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2744 IEM_MC_LOCAL(uint16_t, u16Tmp);
2745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2746
2747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2748 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2749 IEM_MC_ASSIGN(u16Src, u16Imm);
2750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2751 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2752 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2753 IEM_MC_REF_EFLAGS(pEFlags);
2754 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2755 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2756
2757 IEM_MC_ADVANCE_RIP_AND_FINISH();
2758 IEM_MC_END();
2759 }
2760 break;
2761 }
2762
2763 case IEMMODE_32BIT:
2764 {
2765 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2766 if (IEM_IS_MODRM_REG_MODE(bRm))
2767 {
2768 /* register operand */
2769 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2770 IEM_MC_BEGIN(3, 1);
2771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2772 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2773 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2774 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2775 IEM_MC_LOCAL(uint32_t, u32Tmp);
2776
2777 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2778 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2779 IEM_MC_REF_EFLAGS(pEFlags);
2780 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2781 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2782
2783 IEM_MC_ADVANCE_RIP_AND_FINISH();
2784 IEM_MC_END();
2785 }
2786 else
2787 {
2788 /* memory operand */
2789 IEM_MC_BEGIN(3, 2);
2790 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2791 IEM_MC_ARG(uint32_t, u32Src, 1);
2792 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2793 IEM_MC_LOCAL(uint32_t, u32Tmp);
2794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2795
2796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2797 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2798 IEM_MC_ASSIGN(u32Src, u32Imm);
2799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2800 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2801 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2802 IEM_MC_REF_EFLAGS(pEFlags);
2803 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2804 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2805
2806 IEM_MC_ADVANCE_RIP_AND_FINISH();
2807 IEM_MC_END();
2808 }
2809 break;
2810 }
2811
2812 case IEMMODE_64BIT:
2813 {
2814 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
2815 if (IEM_IS_MODRM_REG_MODE(bRm))
2816 {
2817 /* register operand */
2818 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2819 IEM_MC_BEGIN(3, 1);
2820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2821 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2822 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2823 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2824 IEM_MC_LOCAL(uint64_t, u64Tmp);
2825
2826 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2827 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2828 IEM_MC_REF_EFLAGS(pEFlags);
2829 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
2830 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2831
2832 IEM_MC_ADVANCE_RIP_AND_FINISH();
2833 IEM_MC_END();
2834 }
2835 else
2836 {
2837 /* memory operand */
2838 IEM_MC_BEGIN(3, 2);
2839 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2840 IEM_MC_ARG(uint64_t, u64Src, 1);
2841 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2842 IEM_MC_LOCAL(uint64_t, u64Tmp);
2843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2844
2845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2846 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
2847 IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
2848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2849 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2850 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2851 IEM_MC_REF_EFLAGS(pEFlags);
2852 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
2853 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2854
2855 IEM_MC_ADVANCE_RIP_AND_FINISH();
2856 IEM_MC_END();
2857 }
2858 break;
2859 }
2860
2861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2862 }
2863}
2864
2865
2866/**
2867 * @opcode 0x6a
2868 */
2869FNIEMOP_DEF(iemOp_push_Ib)
2870{
2871 IEMOP_MNEMONIC(push_Ib, "push Ib");
2872 IEMOP_HLP_MIN_186();
2873 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2874 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2875
2876 switch (pVCpu->iem.s.enmEffOpSize)
2877 {
2878 case IEMMODE_16BIT:
2879 IEM_MC_BEGIN(0,0);
2880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2881 IEM_MC_PUSH_U16(i8Imm);
2882 IEM_MC_ADVANCE_RIP_AND_FINISH();
2883 IEM_MC_END();
2884 break;
2885 case IEMMODE_32BIT:
2886 IEM_MC_BEGIN(0,0);
2887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2888 IEM_MC_PUSH_U32(i8Imm);
2889 IEM_MC_ADVANCE_RIP_AND_FINISH();
2890 IEM_MC_END();
2891 break;
2892 case IEMMODE_64BIT:
2893 IEM_MC_BEGIN(0,0);
2894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2895 IEM_MC_PUSH_U64(i8Imm);
2896 IEM_MC_ADVANCE_RIP_AND_FINISH();
2897 IEM_MC_END();
2898 break;
2899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2900 }
2901}
2902
2903
2904/**
2905 * @opcode 0x6b
2906 */
2907FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2908{
2909 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
2910 IEMOP_HLP_MIN_186();
2911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2912 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2913
2914 switch (pVCpu->iem.s.enmEffOpSize)
2915 {
2916 case IEMMODE_16BIT:
2917 {
2918 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2919 if (IEM_IS_MODRM_REG_MODE(bRm))
2920 {
2921 /* register operand */
2922 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2923 IEM_MC_BEGIN(3, 1);
2924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2925 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2926 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2927 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2928 IEM_MC_LOCAL(uint16_t, u16Tmp);
2929
2930 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2931 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2932 IEM_MC_REF_EFLAGS(pEFlags);
2933 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2934 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2935
2936 IEM_MC_ADVANCE_RIP_AND_FINISH();
2937 IEM_MC_END();
2938 }
2939 else
2940 {
2941 /* memory operand */
2942 IEM_MC_BEGIN(3, 2);
2943 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2944 IEM_MC_ARG(uint16_t, u16Src, 1);
2945 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2946 IEM_MC_LOCAL(uint16_t, u16Tmp);
2947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2948
2949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2950 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2951 IEM_MC_ASSIGN(u16Src, u16Imm);
2952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2953 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2954 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2955 IEM_MC_REF_EFLAGS(pEFlags);
2956 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2957 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2958
2959 IEM_MC_ADVANCE_RIP_AND_FINISH();
2960 IEM_MC_END();
2961 }
2962 break;
2963 }
2964
2965 case IEMMODE_32BIT:
2966 {
2967 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2968 if (IEM_IS_MODRM_REG_MODE(bRm))
2969 {
2970 /* register operand */
2971 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2972 IEM_MC_BEGIN(3, 1);
2973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2974 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2975 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2976 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2977 IEM_MC_LOCAL(uint32_t, u32Tmp);
2978
2979 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2980 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2981 IEM_MC_REF_EFLAGS(pEFlags);
2982 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2983 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2984
2985 IEM_MC_ADVANCE_RIP_AND_FINISH();
2986 IEM_MC_END();
2987 }
2988 else
2989 {
2990 /* memory operand */
2991 IEM_MC_BEGIN(3, 2);
2992 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2993 IEM_MC_ARG(uint32_t, u32Src, 1);
2994 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2995 IEM_MC_LOCAL(uint32_t, u32Tmp);
2996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2997
2998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2999 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3000 IEM_MC_ASSIGN(u32Src, u32Imm);
3001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3002 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3003 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3004 IEM_MC_REF_EFLAGS(pEFlags);
3005 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3006 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3007
3008 IEM_MC_ADVANCE_RIP_AND_FINISH();
3009 IEM_MC_END();
3010 }
3011 break;
3012 }
3013
3014 case IEMMODE_64BIT:
3015 {
3016 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3017 if (IEM_IS_MODRM_REG_MODE(bRm))
3018 {
3019 /* register operand */
3020 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3021 IEM_MC_BEGIN(3, 1);
3022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3023 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3024 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
3025 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3026 IEM_MC_LOCAL(uint64_t, u64Tmp);
3027
3028 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3029 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3030 IEM_MC_REF_EFLAGS(pEFlags);
3031 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3032 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3033
3034 IEM_MC_ADVANCE_RIP_AND_FINISH();
3035 IEM_MC_END();
3036 }
3037 else
3038 {
3039 /* memory operand */
3040 IEM_MC_BEGIN(3, 2);
3041 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3042 IEM_MC_ARG(uint64_t, u64Src, 1);
3043 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3044 IEM_MC_LOCAL(uint64_t, u64Tmp);
3045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3046
3047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3048 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
3049 IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
3050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3051 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3052 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3053 IEM_MC_REF_EFLAGS(pEFlags);
3054 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3055 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3056
3057 IEM_MC_ADVANCE_RIP_AND_FINISH();
3058 IEM_MC_END();
3059 }
3060 break;
3061 }
3062
3063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3064 }
3065}
3066
3067
3068/**
3069 * @opcode 0x6c
3070 */
/*
 * INSB - input byte(s) from port DX to ES:[e/rDI]; requires 80186 or later.
 * Only decoding happens here: execution is deferred to a C implementation
 * selected by rep prefix and effective address size.  The trailing 'false'
 * argument is forwarded unchanged to the CImpl worker.
 */
3071FNIEMOP_DEF(iemOp_insb_Yb_DX)
3072{
3073 IEMOP_HLP_MIN_186();
3074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* REPNZ is treated exactly like REPZ here, i.e. plain REP semantics. */
3075 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3076 {
3077 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3078 switch (pVCpu->iem.s.enmEffAddrMode)
3079 {
3080 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3081 iemCImpl_rep_ins_op8_addr16, false);
3082 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3083 iemCImpl_rep_ins_op8_addr32, false);
3084 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3085 iemCImpl_rep_ins_op8_addr64, false);
3086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3087 }
3088 }
3089 else
3090 {
3091 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3092 switch (pVCpu->iem.s.enmEffAddrMode)
3093 {
3094 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3095 iemCImpl_ins_op8_addr16, false);
3096 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3097 iemCImpl_ins_op8_addr32, false);
3098 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3099 iemCImpl_ins_op8_addr64, false);
3100 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3101 }
3102 }
3103}
3104
3105
3106/**
3107 * @opcode 0x6d
3108 */
/*
 * INSW/INSD - input word/dword string from port DX to ES:[e/rDI] (80186+).
 * Two-level dispatch: first effective operand size (the 64-bit operand size
 * shares the 32-bit workers, as INS has no 64-bit operand form per the SDM),
 * then effective address size.  All work is deferred to CImpl functions.
 */
3109FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3110{
3111 IEMOP_HLP_MIN_186();
3112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* REPZ and REPNZ both select the rep workers (plain REP semantics). */
3113 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3114 {
3115 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3116 switch (pVCpu->iem.s.enmEffOpSize)
3117 {
3118 case IEMMODE_16BIT:
3119 switch (pVCpu->iem.s.enmEffAddrMode)
3120 {
3121 case IEMMODE_16BIT:
3122 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3123 iemCImpl_rep_ins_op16_addr16, false);
3124 case IEMMODE_32BIT:
3125 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3126 iemCImpl_rep_ins_op16_addr32, false);
3127 case IEMMODE_64BIT:
3128 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3129 iemCImpl_rep_ins_op16_addr64, false);
3130 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3131 }
3132 break;
3133 case IEMMODE_64BIT:
3134 case IEMMODE_32BIT:
3135 switch (pVCpu->iem.s.enmEffAddrMode)
3136 {
3137 case IEMMODE_16BIT:
3138 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3139 iemCImpl_rep_ins_op32_addr16, false);
3140 case IEMMODE_32BIT:
3141 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3142 iemCImpl_rep_ins_op32_addr32, false);
3143 case IEMMODE_64BIT:
3144 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3145 iemCImpl_rep_ins_op32_addr64, false);
3146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3147 }
3148 break;
3149 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3150 }
3151 }
3152 else
3153 {
3154 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3155 switch (pVCpu->iem.s.enmEffOpSize)
3156 {
3157 case IEMMODE_16BIT:
3158 switch (pVCpu->iem.s.enmEffAddrMode)
3159 {
3160 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3161 iemCImpl_ins_op16_addr16, false);
3162 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3163 iemCImpl_ins_op16_addr32, false);
3164 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3165 iemCImpl_ins_op16_addr64, false);
3166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3167 }
3168 break;
3169 case IEMMODE_64BIT:
3170 case IEMMODE_32BIT:
3171 switch (pVCpu->iem.s.enmEffAddrMode)
3172 {
3173 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3174 iemCImpl_ins_op32_addr16, false);
3175 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3176 iemCImpl_ins_op32_addr32, false);
3177 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3178 iemCImpl_ins_op32_addr64, false);
3179 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3180 }
3181 break;
3182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3183 }
3184 }
3185}
3186
3187
3188/**
3189 * @opcode 0x6e
3190 */
/*
 * OUTSB - output byte(s) from the effective source segment to port DX
 * (80186+).  Unlike INS, the source segment is prefix-overridable, so the
 * decoded iEffSeg is passed to the CImpl worker along with the 'false'
 * flag argument.
 */
3191FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3192{
3193 IEMOP_HLP_MIN_186();
3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* REPNZ and REPZ both mean plain REP for OUTS. */
3195 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3196 {
3197 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3198 switch (pVCpu->iem.s.enmEffAddrMode)
3199 {
3200 case IEMMODE_16BIT:
3201 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3202 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3203 case IEMMODE_32BIT:
3204 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3205 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3206 case IEMMODE_64BIT:
3207 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3208 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3210 }
3211 }
3212 else
3213 {
3214 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3215 switch (pVCpu->iem.s.enmEffAddrMode)
3216 {
3217 case IEMMODE_16BIT:
3218 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3219 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3220 case IEMMODE_32BIT:
3221 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3222 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3223 case IEMMODE_64BIT:
3224 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3225 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3227 }
3228 }
3229}
3230
3231
3232/**
3233 * @opcode 0x6f
3234 */
/*
 * OUTSW/OUTSD - output word/dword string to port DX (80186+).
 * Two-level dispatch like INSW/INSD: effective operand size (64-bit operand
 * size shares the 32-bit workers, as OUTS has no 64-bit operand form per
 * the SDM), then effective address size.  The prefix-overridable source
 * segment (iEffSeg) is forwarded to the CImpl worker.
 */
3235FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3236{
3237 IEMOP_HLP_MIN_186();
3238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* REPZ and REPNZ both mean plain REP for OUTS. */
3239 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3240 {
3241 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3242 switch (pVCpu->iem.s.enmEffOpSize)
3243 {
3244 case IEMMODE_16BIT:
3245 switch (pVCpu->iem.s.enmEffAddrMode)
3246 {
3247 case IEMMODE_16BIT:
3248 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3249 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3250 case IEMMODE_32BIT:
3251 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3252 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3253 case IEMMODE_64BIT:
3254 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3255 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3256 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3257 }
3258 break;
3259 case IEMMODE_64BIT:
3260 case IEMMODE_32BIT:
3261 switch (pVCpu->iem.s.enmEffAddrMode)
3262 {
3263 case IEMMODE_16BIT:
3264 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3265 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3266 case IEMMODE_32BIT:
3267 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3268 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3269 case IEMMODE_64BIT:
3270 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3271 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3273 }
3274 break;
3275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3276 }
3277 }
3278 else
3279 {
3280 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3281 switch (pVCpu->iem.s.enmEffOpSize)
3282 {
3283 case IEMMODE_16BIT:
3284 switch (pVCpu->iem.s.enmEffAddrMode)
3285 {
3286 case IEMMODE_16BIT:
3287 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3288 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3289 case IEMMODE_32BIT:
3290 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3291 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3292 case IEMMODE_64BIT:
3293 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3294 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3295 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3296 }
3297 break;
3298 case IEMMODE_64BIT:
3299 case IEMMODE_32BIT:
3300 switch (pVCpu->iem.s.enmEffAddrMode)
3301 {
3302 case IEMMODE_16BIT:
3303 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3304 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3305 case IEMMODE_32BIT:
3306 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3307 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3308 case IEMMODE_64BIT:
3309 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3310 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3312 }
3313 break;
3314 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3315 }
3316 }
3317}
3318
3319
3320/**
3321 * @opcode 0x70
3322 */
/* JO rel8: jump short if the overflow flag (OF) is set. */
3323FNIEMOP_DEF(iemOp_jo_Jb)
3324{
3325 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3326 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3327 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3328
3329 IEM_MC_BEGIN(0, 0);
3330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3331 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3332 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3333 } IEM_MC_ELSE() {
3334 IEM_MC_ADVANCE_RIP_AND_FINISH();
3335 } IEM_MC_ENDIF();
3336 IEM_MC_END();
3337}
3338
3339
3340/**
3341 * @opcode 0x71
3342 */
/* JNO rel8: jump short if OF is clear.  Note the inverted test - the taken
   path is the ELSE branch. */
3343FNIEMOP_DEF(iemOp_jno_Jb)
3344{
3345 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3346 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3347 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3348
3349 IEM_MC_BEGIN(0, 0);
3350 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3351 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3352 IEM_MC_ADVANCE_RIP_AND_FINISH();
3353 } IEM_MC_ELSE() {
3354 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3355 } IEM_MC_ENDIF();
3356 IEM_MC_END();
3357}
3358
3359/**
3360 * @opcode 0x72
3361 */
/* JC/JB/JNAE rel8: jump short if the carry flag (CF) is set. */
3362FNIEMOP_DEF(iemOp_jc_Jb)
3363{
3364 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3365 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3366 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3367
3368 IEM_MC_BEGIN(0, 0);
3369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3370 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3371 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3372 } IEM_MC_ELSE() {
3373 IEM_MC_ADVANCE_RIP_AND_FINISH();
3374 } IEM_MC_ENDIF();
3375 IEM_MC_END();
3376}
3377
3378
3379/**
3380 * @opcode 0x73
3381 */
/* JNC/JNB/JAE rel8: jump short if CF is clear (inverted test, taken path in
   the ELSE branch). */
3382FNIEMOP_DEF(iemOp_jnc_Jb)
3383{
3384 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3385 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3386 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3387
3388 IEM_MC_BEGIN(0, 0);
3389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3390 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3391 IEM_MC_ADVANCE_RIP_AND_FINISH();
3392 } IEM_MC_ELSE() {
3393 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3394 } IEM_MC_ENDIF();
3395 IEM_MC_END();
3396}
3397
3398
3399/**
3400 * @opcode 0x74
3401 */
/* JE/JZ rel8: jump short if the zero flag (ZF) is set. */
3402FNIEMOP_DEF(iemOp_je_Jb)
3403{
3404 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3405 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3406 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3407
3408 IEM_MC_BEGIN(0, 0);
3409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3410 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3411 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3412 } IEM_MC_ELSE() {
3413 IEM_MC_ADVANCE_RIP_AND_FINISH();
3414 } IEM_MC_ENDIF();
3415 IEM_MC_END();
3416}
3417
3418
3419/**
3420 * @opcode 0x75
3421 */
/* JNE/JNZ rel8: jump short if ZF is clear (inverted test, taken path in the
   ELSE branch). */
3422FNIEMOP_DEF(iemOp_jne_Jb)
3423{
3424 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3425 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3426 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3427
3428 IEM_MC_BEGIN(0, 0);
3429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3430 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3431 IEM_MC_ADVANCE_RIP_AND_FINISH();
3432 } IEM_MC_ELSE() {
3433 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3434 } IEM_MC_ENDIF();
3435 IEM_MC_END();
3436}
3437
3438
3439/**
3440 * @opcode 0x76
3441 */
/* JBE/JNA rel8: jump short if CF or ZF is set (below or equal, unsigned). */
3442FNIEMOP_DEF(iemOp_jbe_Jb)
3443{
3444 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3445 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3446 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3447
3448 IEM_MC_BEGIN(0, 0);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3450 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3451 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3452 } IEM_MC_ELSE() {
3453 IEM_MC_ADVANCE_RIP_AND_FINISH();
3454 } IEM_MC_ENDIF();
3455 IEM_MC_END();
3456}
3457
3458
3459/**
3460 * @opcode 0x77
3461 */
/* JA/JNBE rel8: jump short if both CF and ZF are clear (above, unsigned);
   inverted test, taken path in the ELSE branch. */
3462FNIEMOP_DEF(iemOp_jnbe_Jb)
3463{
3464 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3465 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3466 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3467
3468 IEM_MC_BEGIN(0, 0);
3469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3470 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3471 IEM_MC_ADVANCE_RIP_AND_FINISH();
3472 } IEM_MC_ELSE() {
3473 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3474 } IEM_MC_ENDIF();
3475 IEM_MC_END();
3476}
3477
3478
3479/**
3480 * @opcode 0x78
3481 */
/* JS rel8: jump short if the sign flag (SF) is set. */
3482FNIEMOP_DEF(iemOp_js_Jb)
3483{
3484 IEMOP_MNEMONIC(js_Jb, "js Jb");
3485 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3486 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3487
3488 IEM_MC_BEGIN(0, 0);
3489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3490 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3491 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3492 } IEM_MC_ELSE() {
3493 IEM_MC_ADVANCE_RIP_AND_FINISH();
3494 } IEM_MC_ENDIF();
3495 IEM_MC_END();
3496}
3497
3498
3499/**
3500 * @opcode 0x79
3501 */
/* JNS rel8: jump short if SF is clear (inverted test, taken path in the
   ELSE branch). */
3502FNIEMOP_DEF(iemOp_jns_Jb)
3503{
3504 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3505 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3506 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3507
3508 IEM_MC_BEGIN(0, 0);
3509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3510 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3511 IEM_MC_ADVANCE_RIP_AND_FINISH();
3512 } IEM_MC_ELSE() {
3513 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3514 } IEM_MC_ENDIF();
3515 IEM_MC_END();
3516}
3517
3518
3519/**
3520 * @opcode 0x7a
3521 */
/* JP/JPE rel8: jump short if the parity flag (PF) is set. */
3522FNIEMOP_DEF(iemOp_jp_Jb)
3523{
3524 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3525 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3526 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3527
3528 IEM_MC_BEGIN(0, 0);
3529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3530 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3531 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3532 } IEM_MC_ELSE() {
3533 IEM_MC_ADVANCE_RIP_AND_FINISH();
3534 } IEM_MC_ENDIF();
3535 IEM_MC_END();
3536}
3537
3538
3539/**
3540 * @opcode 0x7b
3541 */
/* JNP/JPO rel8: jump short if PF is clear (inverted test, taken path in the
   ELSE branch). */
3542FNIEMOP_DEF(iemOp_jnp_Jb)
3543{
3544 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3545 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3546 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3547
3548 IEM_MC_BEGIN(0, 0);
3549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3550 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3551 IEM_MC_ADVANCE_RIP_AND_FINISH();
3552 } IEM_MC_ELSE() {
3553 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3554 } IEM_MC_ENDIF();
3555 IEM_MC_END();
3556}
3557
3558
3559/**
3560 * @opcode 0x7c
3561 */
/* JL/JNGE rel8: jump short if SF != OF (less, signed). */
3562FNIEMOP_DEF(iemOp_jl_Jb)
3563{
3564 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3565 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3566 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3567
3568 IEM_MC_BEGIN(0, 0);
3569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3570 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3571 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3572 } IEM_MC_ELSE() {
3573 IEM_MC_ADVANCE_RIP_AND_FINISH();
3574 } IEM_MC_ENDIF();
3575 IEM_MC_END();
3576}
3577
3578
3579/**
3580 * @opcode 0x7d
3581 */
/* JGE/JNL rel8: jump short if SF == OF (greater or equal, signed); inverted
   test, taken path in the ELSE branch. */
3582FNIEMOP_DEF(iemOp_jnl_Jb)
3583{
3584 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3585 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3586 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3587
3588 IEM_MC_BEGIN(0, 0);
3589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3590 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3591 IEM_MC_ADVANCE_RIP_AND_FINISH();
3592 } IEM_MC_ELSE() {
3593 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3594 } IEM_MC_ENDIF();
3595 IEM_MC_END();
3596}
3597
3598
3599/**
3600 * @opcode 0x7e
3601 */
/* JLE/JNG rel8: jump short if ZF is set or SF != OF (less or equal, signed). */
3602FNIEMOP_DEF(iemOp_jle_Jb)
3603{
3604 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3605 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3606 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3607
3608 IEM_MC_BEGIN(0, 0);
3609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3610 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3611 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3612 } IEM_MC_ELSE() {
3613 IEM_MC_ADVANCE_RIP_AND_FINISH();
3614 } IEM_MC_ENDIF();
3615 IEM_MC_END();
3616}
3617
3618
3619/**
3620 * @opcode 0x7f
3621 */
/* JG/JNLE rel8: jump short if ZF is clear and SF == OF (greater, signed);
   inverted test, taken path in the ELSE branch. */
3622FNIEMOP_DEF(iemOp_jnle_Jb)
3623{
3624 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3625 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3626 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3627
3628 IEM_MC_BEGIN(0, 0);
3629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3630 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3631 IEM_MC_ADVANCE_RIP_AND_FINISH();
3632 } IEM_MC_ELSE() {
3633 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3634 } IEM_MC_ENDIF();
3635 IEM_MC_END();
3636}
3637
3638
3639/**
3640 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3641 * iemOp_Grp1_Eb_Ib_80.
3642 */
/*
 * Note: this macro is deliberately left 'open' - it ends inside the
 * else-branch of the memory-target path (at '(void)0') and MUST be followed
 * by IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() or IEMOP_BODY_BINARY_Eb_Ib_LOCKED(),
 * which supply the LOCK-prefix handling and close the open braces.
 */
3643#define IEMOP_BODY_BINARY_Eb_Ib(a_fnNormalU8, a_fRW) \
3644 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3645 { \
3646 /* register target */ \
3647 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3648 IEM_MC_BEGIN(3, 0); \
3649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3650 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3651 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3652 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3653 \
3654 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3655 IEM_MC_REF_EFLAGS(pEFlags); \
3656 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3657 \
3658 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3659 IEM_MC_END(); \
3660 } \
3661 else \
3662 { \
3663 /* memory target */ \
3664 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3665 { \
3666 IEM_MC_BEGIN(3, 2); \
3667 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3668 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3669 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3670 \
3671 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3672 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3673 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3674 IEMOP_HLP_DONE_DECODING(); \
3675 \
3676 IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3677 IEM_MC_FETCH_EFLAGS(EFlags); \
3678 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3679 \
3680 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
3681 IEM_MC_COMMIT_EFLAGS(EFlags); \
3682 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3683 IEM_MC_END(); \
3684 } \
3685 else \
3686 { \
3687 (void)0
3688
/* Terminator for IEMOP_BODY_BINARY_Eb_Ib: rejects the LOCK prefix.  Used by
   operations that do not write the destination (CMP). */
3689#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
3690 IEMOP_HLP_DONE_DECODING(); \
3691 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
3692 } \
3693 } \
3694 (void)0
3695
/* Terminator for IEMOP_BODY_BINARY_Eb_Ib: handles the LOCK-prefixed memory
   target by mapping the byte read-write and calling the atomic worker
   a_fnLockedU8. */
3696#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
3697 IEM_MC_BEGIN(3, 2); \
3698 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3699 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3700 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3701 \
3702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3703 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3704 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3705 IEMOP_HLP_DONE_DECODING(); \
3706 \
3707 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3708 IEM_MC_FETCH_EFLAGS(EFlags); \
3709 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
3710 \
3711 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
3712 IEM_MC_COMMIT_EFLAGS(EFlags); \
3713 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3714 IEM_MC_END(); \
3715 } \
3716 } \
3717 (void)0
3718
3719
3720/**
3721 * @opmaps grp1_80,grp1_83
3722 * @opcode /0
3723 */
/* ADD Eb,Ib: read-modify-write; the _LOCKED terminator supplies the atomic
   worker for LOCK-prefixed memory targets. */
3724FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
3725{
3726 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
3727 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
3728 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
3729}
3730
3731
3732/**
3733 * @opmaps grp1_80,grp1_83
3734 * @opcode /1
3735 */
/* OR Eb,Ib: read-modify-write; atomic worker handles the LOCK-prefixed
   memory form. */
3736FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
3737{
3738 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
3739 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
3740 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
3741}
3742
3743
3744/**
3745 * @opmaps grp1_80,grp1_83
3746 * @opcode /2
3747 */
/* ADC Eb,Ib: add with carry, read-modify-write; atomic worker handles the
   LOCK-prefixed memory form. */
3748FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
3749{
3750 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
3751 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
3752 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
3753}
3754
3755
3756/**
3757 * @opmaps grp1_80,grp1_83
3758 * @opcode /3
3759 */
/* SBB Eb,Ib: subtract with borrow, read-modify-write; atomic worker handles
   the LOCK-prefixed memory form. */
3760FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
3761{
3762 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
3763 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
3764 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
3765}
3766
3767
3768/**
3769 * @opmaps grp1_80,grp1_83
3770 * @opcode /4
3771 */
/* AND Eb,Ib: read-modify-write; atomic worker handles the LOCK-prefixed
   memory form. */
3772FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
3773{
3774 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
3775 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
3776 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
3777}
3778
3779
3780/**
3781 * @opmaps grp1_80,grp1_83
3782 * @opcode /5
3783 */
/* SUB Eb,Ib: read-modify-write; atomic worker handles the LOCK-prefixed
   memory form. */
3784FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
3785{
3786 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
3787 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
3788 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
3789}
3790
3791
3792/**
3793 * @opmaps grp1_80,grp1_83
3794 * @opcode /6
3795 */
/* XOR Eb,Ib: read-modify-write; atomic worker handles the LOCK-prefixed
   memory form. */
3796FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
3797{
3798 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
3799 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
3800 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
3801}
3802
3803
3804/**
3805 * @opmaps grp1_80,grp1_83
3806 * @opcode /7
3807 */
/* CMP Eb,Ib: only reads the destination (IEM_ACCESS_DATA_R), so there is no
   locked variant and a LOCK prefix raises #UD via the _NO_LOCK terminator. */
3808FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
3809{
3810 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
3811 IEMOP_BODY_BINARY_Eb_Ib(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
3812 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
3813}
3814
3815
3816/**
3817 * @opcode 0x80
3818 */
/* Group 1, byte operand with byte immediate (opcode 0x80): dispatch on the
   ModRM.reg field to the eight arithmetic/logic sub-handlers. */
3819FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
3820{
3821 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3822 switch (IEM_GET_MODRM_REG_8(bRm))
3823 {
3824 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
3825 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
3826 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
3827 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
3828 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
3829 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
3830 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
3831 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
3832 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3833 }
3834}
3835
3836
3837/**
3838 * Body for a group 1 binary operator.
3839 */
/*
 * Note: like IEMOP_BODY_BINARY_Eb_Ib this macro is deliberately left 'open'
 * (it ends at '(void)0' inside the LOCK-prefixed else-branch) and MUST be
 * closed by IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() or _LOCKED().
 * The 32-bit register case only clears the high dword of the 64-bit GREG
 * when the destination is actually written (a_fRW == IEM_ACCESS_DATA_RW),
 * so CMP leaves the register untouched.  The 64-bit cases decode a 4-byte
 * immediate (imm32 sign-extended to 64 bits), hence the '4' passed to
 * IEM_MC_CALC_RM_EFF_ADDR for displacement/immediate accounting.
 */
3840#define IEMOP_BODY_BINARY_Ev_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
3841 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3842 { \
3843 /* register target */ \
3844 switch (pVCpu->iem.s.enmEffOpSize) \
3845 { \
3846 case IEMMODE_16BIT: \
3847 { \
3848 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
3849 IEM_MC_BEGIN(3, 0); \
3850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3851 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
3852 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
3853 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3854 \
3855 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3856 IEM_MC_REF_EFLAGS(pEFlags); \
3857 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
3858 \
3859 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3860 IEM_MC_END(); \
3861 break; \
3862 } \
3863 \
3864 case IEMMODE_32BIT: \
3865 { \
3866 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
3867 IEM_MC_BEGIN(3, 0); \
3868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3869 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
3870 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
3871 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3872 \
3873 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3874 IEM_MC_REF_EFLAGS(pEFlags); \
3875 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
3876 if (a_fRW == IEM_ACCESS_DATA_RW) \
3877 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
3878 \
3879 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3880 IEM_MC_END(); \
3881 break; \
3882 } \
3883 \
3884 case IEMMODE_64BIT: \
3885 { \
3886 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
3887 IEM_MC_BEGIN(3, 0); \
3888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3889 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
3890 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
3891 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3892 \
3893 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3894 IEM_MC_REF_EFLAGS(pEFlags); \
3895 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
3896 \
3897 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3898 IEM_MC_END(); \
3899 break; \
3900 } \
3901 \
3902 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3903 } \
3904 } \
3905 else \
3906 { \
3907 /* memory target */ \
3908 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3909 { \
3910 switch (pVCpu->iem.s.enmEffOpSize) \
3911 { \
3912 case IEMMODE_16BIT: \
3913 { \
3914 IEM_MC_BEGIN(3, 2); \
3915 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
3916 IEM_MC_ARG(uint16_t, u16Src, 1); \
3917 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3918 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3919 \
3920 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
3921 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
3922 IEM_MC_ASSIGN(u16Src, u16Imm); \
3923 IEMOP_HLP_DONE_DECODING(); \
3924 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3925 IEM_MC_FETCH_EFLAGS(EFlags); \
3926 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
3927 \
3928 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
3929 IEM_MC_COMMIT_EFLAGS(EFlags); \
3930 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3931 IEM_MC_END(); \
3932 break; \
3933 } \
3934 \
3935 case IEMMODE_32BIT: \
3936 { \
3937 IEM_MC_BEGIN(3, 2); \
3938 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
3939 IEM_MC_ARG(uint32_t, u32Src, 1); \
3940 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3942 \
3943 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
3944 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
3945 IEM_MC_ASSIGN(u32Src, u32Imm); \
3946 IEMOP_HLP_DONE_DECODING(); \
3947 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3948 IEM_MC_FETCH_EFLAGS(EFlags); \
3949 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
3950 \
3951 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
3952 IEM_MC_COMMIT_EFLAGS(EFlags); \
3953 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3954 IEM_MC_END(); \
3955 break; \
3956 } \
3957 \
3958 case IEMMODE_64BIT: \
3959 { \
3960 IEM_MC_BEGIN(3, 2); \
3961 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
3962 IEM_MC_ARG(uint64_t, u64Src, 1); \
3963 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3965 \
3966 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
3967 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
3968 IEMOP_HLP_DONE_DECODING(); \
3969 IEM_MC_ASSIGN(u64Src, u64Imm); \
3970 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3971 IEM_MC_FETCH_EFLAGS(EFlags); \
3972 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
3973 \
3974 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
3975 IEM_MC_COMMIT_EFLAGS(EFlags); \
3976 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3977 IEM_MC_END(); \
3978 break; \
3979 } \
3980 \
3981 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3982 } \
3983 } \
3984 else \
3985 { \
3986 (void)0
3987
/* Terminator for IEMOP_BODY_BINARY_Ev_Iz: rejects the LOCK prefix (used by
   CMP, which does not write the destination). */
3988#define IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() \
3989 IEMOP_HLP_DONE_DECODING(); \
3990 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
3991 } \
3992 } \
3993 (void)0
3994
/* Terminator for IEMOP_BODY_BINARY_Ev_Iz: handles the LOCK-prefixed memory
   target by mapping the operand read-write and calling the atomic
   a_fnLockedU16/U32/U64 worker for the effective operand size. */
3995#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
3996 switch (pVCpu->iem.s.enmEffOpSize) \
3997 { \
3998 case IEMMODE_16BIT: \
3999 { \
4000 IEM_MC_BEGIN(3, 2); \
4001 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4002 IEM_MC_ARG(uint16_t, u16Src, 1); \
4003 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4005 \
4006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4007 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4008 IEM_MC_ASSIGN(u16Src, u16Imm); \
4009 IEMOP_HLP_DONE_DECODING(); \
4010 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4011 IEM_MC_FETCH_EFLAGS(EFlags); \
4012 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4013 \
4014 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
4015 IEM_MC_COMMIT_EFLAGS(EFlags); \
4016 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4017 IEM_MC_END(); \
4018 break; \
4019 } \
4020 \
4021 case IEMMODE_32BIT: \
4022 { \
4023 IEM_MC_BEGIN(3, 2); \
4024 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4025 IEM_MC_ARG(uint32_t, u32Src, 1); \
4026 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4027 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4028 \
4029 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4030 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4031 IEM_MC_ASSIGN(u32Src, u32Imm); \
4032 IEMOP_HLP_DONE_DECODING(); \
4033 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4034 IEM_MC_FETCH_EFLAGS(EFlags); \
4035 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4036 \
4037 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
4038 IEM_MC_COMMIT_EFLAGS(EFlags); \
4039 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4040 IEM_MC_END(); \
4041 break; \
4042 } \
4043 \
4044 case IEMMODE_64BIT: \
4045 { \
4046 IEM_MC_BEGIN(3, 2); \
4047 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4048 IEM_MC_ARG(uint64_t, u64Src, 1); \
4049 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4051 \
4052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4053 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4054 IEMOP_HLP_DONE_DECODING(); \
4055 IEM_MC_ASSIGN(u64Src, u64Imm); \
4056 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4057 IEM_MC_FETCH_EFLAGS(EFlags); \
4058 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4059 \
4060 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
4061 IEM_MC_COMMIT_EFLAGS(EFlags); \
4062 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4063 IEM_MC_END(); \
4064 break; \
4065 } \
4066 \
4067 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4068 } \
4069 } \
4070 } \
4071 (void)0
4072
4073
4074/**
4075 * @opmaps grp1_81
4076 * @opcode /0
4077 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    /* The two body macros expand to one if/else: the first emits the register
       and unlocked-memory forms, the second closes the open LOCK-prefix branch
       with the interlocked memory helpers. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4084
4085
4086/**
4087 * @opmaps grp1_81
4088 * @opcode /1
4089 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4096
4097
4098/**
4099 * @opmaps grp1_81
4100 * @opcode /2
4101 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4108
4109
4110/**
4111 * @opmaps grp1_81
4112 * @opcode /3
4113 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4120
4121
4122/**
4123 * @opmaps grp1_81
4124 * @opcode /4
4125 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4132
4133
4134/**
4135 * @opmaps grp1_81
4136 * @opcode /5
4137 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4144
4145
4146/**
4147 * @opmaps grp1_81
4148 * @opcode /6
4149 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4156
4157
4158/**
4159 * @opmaps grp1_81
4160 * @opcode /7
4161 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* CMP only reads its destination (IEM_ACCESS_DATA_R) and has no locked
       form; the NO_LOCK macro closes the memory branch rejecting LOCK. */
    IEMOP_BODY_BINARY_Ev_Iz(    iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK();
}
4168
4169
4170/**
4171 * @opcode 0x81
4172 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1 (0x81): the reg field of the ModR/M byte selects the operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4189
4190
4191/**
4192 * @opcode 0x82
4193 * @opmnemonic grp1_82
4194 * @opgroup og_groups
4195 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is an alias of 0x80 that is invalid in 64-bit mode;
       outside 64-bit mode it simply forwards to the 0x80 decoder. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4201
4202
4203/**
4204 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4205 * iemOp_Grp1_Ev_Ib.
4206 */
#define IEMOP_BODY_BINARY_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         * \
         * The imm8 follows the ModR/M byte directly and is sign-extended \
         * to the effective operand size (the (int8_t) casts below). \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* Only ops that write the destination clear the high dword; \
                   read-only ops (cmp) pass IEM_ACCESS_DATA_R and skip it. */ \
                if ((a_fRW) != IEM_ACCESS_DATA_R) \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         * \
         * Note! This macro deliberately leaves the LOCK-prefix 'else' branch \
         *       open; it must be closed by IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \
         *       or IEMOP_BODY_BINARY_Ev_Ib_LOCKED(). \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    /* The trailing 1 is the size of the immediate that still \
                       follows the ModR/M bytes (cf. 4 in the Iz variant). */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
4356
/* Closes the LOCK-prefixed memory branch left open by IEMOP_BODY_BINARY_Ev_Ib
   for instructions without a locked form (cmp): a LOCK prefix raises \#UD. */
#define IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4363
/* Closes the LOCK-prefixed memory branch left open by IEMOP_BODY_BINARY_Ev_Ib,
   emitting the interlocked (IEM_ACCESS_DATA_RW) variants of the operation.
   The imm8 is sign-extended to the effective operand size, as in the
   unlocked body. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                    0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                    0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                    0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,            2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4441
4442/**
4443 * @opmaps grp1_83
4444 * @opcode /0
4445 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4452
4453
4454/**
4455 * @opmaps grp1_83
4456 * @opcode /1
4457 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4464
4465
4466/**
4467 * @opmaps grp1_83
4468 * @opcode /2
4469 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4476
4477
4478/**
4479 * @opmaps grp1_83
4480 * @opcode /3
4481 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4488
4489
4490/**
4491 * @opmaps grp1_83
4492 * @opcode /4
4493 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4500
4501
4502/**
4503 * @opmaps grp1_83
4504 * @opcode /5
4505 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4512
4513
4514/**
4515 * @opmaps grp1_83
4516 * @opcode /6
4517 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* Unlocked register/memory forms first, then the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4524
4525
4526/**
4527 * @opmaps grp1_83
4528 * @opcode /7
4529 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* CMP only reads its destination (IEM_ACCESS_DATA_R) and has no locked
       form; the NO_LOCK macro closes the memory branch rejecting LOCK. */
    IEMOP_BODY_BINARY_Ev_Ib(    iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK();
}
4536
4537
4538/**
4539 * @opcode 0x83
4540 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Group 1 (0x83): the reg field of the ModR/M byte selects the operation. */
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4560
4561
4562/**
4563 * @opcode 0x84
4564 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is left undefined by TEST; tell the execution verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Operands are only read (IEM_ACCESS_DATA_R, no write-back) and a LOCK
       prefix is invalid. */
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_test_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
4572
4573
4574/**
4575 * @opcode 0x85
4576 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is left undefined by TEST; tell the execution verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Operands are only read (IEM_ACCESS_DATA_R, no write-back) and a LOCK
       prefix is invalid. */
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
4584
4585
4586/**
4587 * @opcode 0x86
4588 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: swap the two byte registers via temporaries. */
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *,  pu8Mem, 0);
        IEM_MC_ARG(uint8_t *,  pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG with a memory operand defaults to the interlocked helper; the
           unlocked one is used only when the exec mode says to disregard LOCK. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4637
4638
4639/**
4640 * @opcode 0x87
4641 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: swap via two temporaries, one MC block per
           effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         *
         * XCHG with a memory operand defaults to the interlocked helper; the
         * unlocked one is used only when the exec mode says to disregard LOCK.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The 32-bit write through the register reference requires an
                   explicit clearing of the high dword afterwards. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4775
4776
4777/**
4778 * @opcode 0x88
4779 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register: copy reg -> rm. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4816
4817
4818/**
4819 * @opcode 0x89
4820 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register: copy reg -> rm, one MC block per operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4914
4915
4916/**
4917 * @opcode 0x8a
4918 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register: copy rm -> reg. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4954
4955
4956/**
4957 * @opcode 0x8b
4958 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register: copy rm -> reg, one MC block per operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5052
5053
5054/**
5055 * opcode 0x63
5056 * @todo Table fixme
5057 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    /* Opcode 0x63 is ARPL outside 64-bit mode and MOVSXD in 64-bit mode. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    /* MOVSXD without a 64-bit effective operand size is handled as plain MOV. */
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
5066
5067
5068/**
5069 * @opcode 0x8c
5070 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     * Only ES..GS (0..5) are valid segment register indices; anything above
     * GS raises \#UD.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended selector */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended selector */
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5146
5147
5148
5149
5150/**
5151 * @opcode 0x8d
5152 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    /* Loads the effective address of the memory operand into Gv; the
       register form of the ModR/M byte is invalid for LEA. */
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            /* The address is truncated to the operand size before storing. */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* No cast needed: the effective address is already 64-bit. */
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5199
5200
5201/**
5202 * @opcode 0x8e
5203 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    /* Loads a segment register (Sw) from a general register or memory (Ev).
       The load is delegated to the iemCImpl_load_SReg C implementation which
       performs the descriptor checks/loads. */
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.  Loading CS this way is
     * always invalid.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(2, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* Loading SS..ES in 32-bit code can change the execution mode
           (stack/data semantics), hence the IEM_CIMPL_F_MODE annotation. */
        if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
            IEM_MC_CALL_CIMPL_2(                    0, iemCImpl_load_SReg, iSRegArg, u16Value);
        else
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
            IEM_MC_CALL_CIMPL_2(                    0, iemCImpl_load_SReg, iSRegArg, u16Value);
        else
            IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
}
5263
5264
5265/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    /* The third IEM_MC_CALC_RM_EFF_ADDR parameter ('cbImm << 8' style value)
       passes the pop size so rSP can be biased during the address
       calculation, per the Intel before-use increment rule above.
       NOTE(review): the cases below have no 'break' - this appears to rely on
       IEM_MC_CALL_CIMPL_2 expanding to a return statement; confirm against
       IEMMc.h before restructuring. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_32BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_64BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP copy so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5402
5403
5404/**
5405 * @opcode 0x8f
5406 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with 66/F2/F3/LOCK/REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The R/X/B/vvvv fields are stored inverted in the prefix bytes,
               hence the bit complements below. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
5466
5467
5468/**
5469 * Common 'xchg reg,rAX' helper.
5470 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    /* Swaps rAX with the register identified by iReg (REX.B extended).
       Note: no LOCK semantics here - register-only exchange. */
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit GREG stores clear the upper halves of both registers. */
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5518
5519
5520/**
5521 * @opcode 0x90
5522 */
FNIEMOP_DEF(iemOp_nop)
{
    /* Opcode 0x90 is 'nop', but with REX.B it becomes 'xchg r8,rAX', and
       with an F3 (LOCK field here) prefix it is 'pause'. */
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
        /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
           and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
        if (!IEM_IS_IN_GUEST(pVCpu))
        { /* probable */ }
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    /** @todo testcase: lock nop; lock pause */
    /* Non-intercepted pause and plain nop both fall through to here. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5556
5557
5558/**
5559 * @opcode 0x91
5560 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    /* Exchange rCX (or r9 with REX.B) with rAX. */
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
5566
5567
5568/**
5569 * @opcode 0x92
5570 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    /* Exchange rDX (or r10 with REX.B) with rAX. */
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
5576
5577
5578/**
5579 * @opcode 0x93
5580 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    /* Exchange rBX (or r11 with REX.B) with rAX. */
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
5586
5587
5588/**
5589 * @opcode 0x94
5590 */
5591FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
5592{
5593 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
5594 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
5595}
5596
5597
5598/**
5599 * @opcode 0x95
5600 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    /* Exchange rBP (or r13 with REX.B) with rAX. */
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
5606
5607
5608/**
5609 * @opcode 0x96
5610 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    /* Exchange rSI (or r14 with REX.B) with rAX. */
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
5616
5617
5618/**
5619 * @opcode 0x97
5620 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    /* Exchange rDI (or r15 with REX.B) with rAX. */
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
5626
5627
5628/**
5629 * @opcode 0x98
5630 */
FNIEMOP_DEF(iemOp_cbw)
{
    /* Sign-extends the lower half of rAX into its upper half:
       cbw (AL->AX), cwde (AX->EAX) or cdqe (EAX->RAX) depending on the
       effective operand size.  Implemented by testing the source sign bit
       and OR-ing/AND-ing in the extension mask. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5677
5678
5679/**
5680 * @opcode 0x99
5681 */
FNIEMOP_DEF(iemOp_cwd)
{
    /* Sign-extends rAX into rDX:rAX: cwd (AX->DX:AX), cdq (EAX->EDX:EAX) or
       cqo (RAX->RDX:RAX) depending on the effective operand size.  rDX is
       set to all-ones or all-zeroes based on the rAX sign bit. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5728
5729
5730/**
5731 * @opcode 0x9a
5732 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    /* Direct far call with an immediate sel:offset pointer; invalid in
       64-bit mode. */
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    /* The offset is 16 or 32 bits wide depending on operand size; 16-bit is
       zero-extended so a single uint32_t covers both. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
5750
5751
5752/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    /* wait/fwait: checks for pending x87 exceptions (and CR0.MP/TS device
       not available conditions), otherwise does nothing. */
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5763
5764
5765/**
5766 * @opcode 0x9c
5767 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    /* Pushes (E/R)FLAGS; deferred to the C implementation which handles the
       V86/IOPL cases and possible VM-exits. */
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
5775
5776
5777/**
5778 * @opcode 0x9d
5779 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    /* Pops (E/R)FLAGS; deferred to the C implementation.  Flagged with
       CHECK_IRQ_BEFORE_AND_AFTER since popf can enable interrupts (IF). */
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
5788
5789
5790/**
5791 * @opcode 0x9e
5792 */
FNIEMOP_DEF(iemOp_sahf)
{
    /* Stores AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF only); invalid
       in 64-bit mode unless the CPU reports the LAHF/SAHF feature. */
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Mask AH down to the writable status flags, keep the upper EFLAGS bits,
       and force the always-one reserved bit (bit 1). */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5813
5814
5815/**
5816 * @opcode 0x9f
5817 */
FNIEMOP_DEF(iemOp_lahf)
{
    /* Loads the low byte of EFLAGS into AH; invalid in 64-bit mode unless
       the CPU reports the LAHF/SAHF feature. */
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5832
5833
5834/**
5835 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
5836 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
5837 * Will return/throw on failures.
5838 * @param a_GCPtrMemOff The variable to store the offset in.
5839 */
5840#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
5841 do \
5842 { \
5843 switch (pVCpu->iem.s.enmEffAddrMode) \
5844 { \
5845 case IEMMODE_16BIT: \
5846 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
5847 break; \
5848 case IEMMODE_32BIT: \
5849 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
5850 break; \
5851 case IEMMODE_64BIT: \
5852 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
5853 break; \
5854 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5855 } \
5856 } while (0)
5857
5858/**
5859 * @opcode 0xa0
5860 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /* Loads AL from the memory byte at the immediate moffs offset (within
       the effective segment). */
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0,1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5881
5882
5883/**
5884 * @opcode 0xa1
5885 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /* Loads rAX (16/32/64 bits per operand size) from memory at the
       immediate moffs offset. */
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5933
5934
5935/**
5936 * @opcode 0xa2
5937 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /* Stores AL to the memory byte at the immediate moffs offset (within
       the effective segment). */
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0,1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5958
5959
5960/**
5961 * @opcode 0xa3
5962 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /* Stores rAX (16/32/64 bits per operand size) to memory at the
       immediate moffs offset. */
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6010
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeated movs iteration for the given value width and
 * address width: loads from DS(or override):rSI, stores to ES:rDI, then
 * advances or retreats both index registers by the element size according
 * to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6030
6031/**
6032 * @opcode 0xa4
6033 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /* Byte string move; rep-prefixed forms go to the C implementation
       (selected by address size), the plain form uses IEM_MOVS_CASE. */
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6064
6065
6066/**
6067 * @opcode 0xa5
6068 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    /* Word/dword/qword string move; rep-prefixed forms go to the C
       implementation (selected by operand and address size), the plain
       forms use IEM_MOVS_CASE. */

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                /* NOTE(review): no 'break' after this inner switch; all of its
                   cases return (IEM_MC_DEFER_TO_CIMPL_1_RET), so the
                   fall-through is unreachable - but a 'break' would be tidier. */
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6149
6150#undef IEM_MOVS_CASE
6151
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the non-repeated CMPS body for one operand width (ValBits) and one
 * address width (AddrBits): fetch the byte/word/dword/qword at
 * DS(or seg override):[xSI] and at ES:[xDI], run the compare assembly helper
 * to update EFLAGS, then step both xSI and xDI forward or backward by the
 * operand size depending on EFLAGS.DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    /* First operand comes from the effective segment (seg-prefix aware). */ \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    /* Second operand is always ES:[xDI] (no segment override possible). */ \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    /* DF decides whether the string walks down or up through memory. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

/**
 * @opcode 0xa6
 *
 * 'cmps Xb,Yb' - compare the byte at seg:[xSI] with the byte at ES:[xDI],
 * setting arithmetic EFLAGS and advancing xSI/xDI per EFLAGS.DF.
 * REPE/REPNE forms are deferred to C implementations since they loop on rCX
 * and may yield (IEM_CIMPL_F_REP).
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6226
6227
/**
 * @opcode 0xa7
 *
 * 'cmps Xv,Yv' - word/dword/qword compare of seg:[xSI] against ES:[xDI].
 * REPE/REPNE forms defer to C implementations (they loop on rCX and may
 * yield).  The op-size x addr-size matrix is handled with nested switches.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here, unlike the 16-bit case above.  Harmless
                   since every inner case returns, but worth confirming it is intentional. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; unreachable since all inner cases return. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6345
6346#undef IEM_CMPS_CASE
6347
/**
 * @opcode 0xa8
 *
 * 'test al,Ib' - AND AL with the immediate byte, discarding the result and
 * only updating EFLAGS.  The whole body is generated by the shared
 * IEMOP_BODY_BINARY_AL_Ib macro; AF is architecturally undefined for TEST.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
6357
6358
/**
 * @opcode 0xa9
 *
 * 'test rAX,Iz' - operand-size variant of TEST against the accumulator,
 * dispatching to the 16/32/64-bit assembly helpers via the shared
 * IEMOP_BODY_BINARY_rAX_Iz macro.  AF is architecturally undefined for TEST.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
6368
6369
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-repeated STOS body for one operand width (ValBits) and one
 * address width (AddrBits): store AL/AX/EAX/RAX to ES:[xDI] and step xDI by
 * the operand size, direction controlled by EFLAGS.DF.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

6386
/**
 * @opcode 0xaa
 *
 * 'stos Yb,al' - store AL at ES:[xDI] and step xDI by one (direction per
 * EFLAGS.DF).  With a REP prefix (REPZ and REPNZ behave identically for
 * STOS) the work is deferred to a C implementation that loops on rCX.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6420
6421
6422/**
6423 * @opcode 0xab
6424 */
6425FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6426{
6427 /*
6428 * Use the C implementation if a repeat prefix is encountered.
6429 */
6430 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6431 {
6432 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6434 switch (pVCpu->iem.s.enmEffOpSize)
6435 {
6436 case IEMMODE_16BIT:
6437 switch (pVCpu->iem.s.enmEffAddrMode)
6438 {
6439 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
6440 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
6441 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
6442 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6443 }
6444 break;
6445 case IEMMODE_32BIT:
6446 switch (pVCpu->iem.s.enmEffAddrMode)
6447 {
6448 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
6449 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
6450 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
6451 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6452 }
6453 case IEMMODE_64BIT:
6454 switch (pVCpu->iem.s.enmEffAddrMode)
6455 {
6456 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6457 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
6458 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
6459 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6460 }
6461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6462 }
6463 }
6464
6465 /*
6466 * Annoying double switch here.
6467 * Using ugly macro for implementing the cases, sharing it with stosb.
6468 */
6469 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
6470 switch (pVCpu->iem.s.enmEffOpSize)
6471 {
6472 case IEMMODE_16BIT:
6473 switch (pVCpu->iem.s.enmEffAddrMode)
6474 {
6475 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
6476 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
6477 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
6478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6479 }
6480 break;
6481
6482 case IEMMODE_32BIT:
6483 switch (pVCpu->iem.s.enmEffAddrMode)
6484 {
6485 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
6486 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
6487 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
6488 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6489 }
6490 break;
6491
6492 case IEMMODE_64BIT:
6493 switch (pVCpu->iem.s.enmEffAddrMode)
6494 {
6495 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6496 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
6497 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
6498 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6499 }
6500 break;
6501 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6502 }
6503}
6504
6505#undef IEM_STOS_CASE
6506
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-repeated LODS body for one operand width (ValBits) and one
 * address width (AddrBits): load AL/AX/EAX/RAX from the effective segment at
 * [xSI] and step xSI by the operand size, direction controlled by EFLAGS.DF.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

6523
/**
 * @opcode 0xac
 *
 * 'lodsb AL,Xb' - load AL from seg:[xSI] and step xSI (direction per
 * EFLAGS.DF).  With a REP prefix (REPZ/REPNZ equivalent for LODS) the work
 * is deferred to a C implementation looping on rCX.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6557
6558
/**
 * @opcode 0xad
 *
 * 'lods rAX,Xv' - load AX/EAX/RAX from seg:[xSI] and step xSI by the operand
 * size (direction per EFLAGS.DF).  REP forms defer to C implementations
 * looping on rCX.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; unreachable since all inner cases return. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6641
6642#undef IEM_LODS_CASE
6643
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeated SCAS body for one operand width (ValBits) and one
 * address width (AddrBits): compare AL/AX/EAX/RAX against the value at
 * ES:[xDI] (updating EFLAGS only; the accumulator is passed by reference but
 * cmp does not write it), then step xDI by the operand size per EFLAGS.DF.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
6666
/**
 * @opcode 0xae
 *
 * 'scasb AL,Xb' - compare AL with the byte at ES:[xDI], updating EFLAGS and
 * stepping xDI per EFLAGS.DF.  REPE/REPNE forms defer to C implementations
 * looping on rCX.
 */
FNIEMOP_DEF(iemOp_scasb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        /* NOTE(review): 'repone' below looks like a typo for 'repne' in the stats
           identifier; left unchanged since it may be referenced by tooling. */
        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with scas[wdq] below.
     */
    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6712
6713
/**
 * @opcode 0xaf
 *
 * 'scas rAX,Xv' - compare AX/EAX/RAX with the value at ES:[xDI], updating
 * EFLAGS and stepping xDI per EFLAGS.DF.  REPE/REPNE forms defer to C
 * implementations looping on rCX.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; unreachable since all inner cases return. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? 64-bit mode permits 32-bit (0x67) but not 16-bit addressing - verify. */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* NOTE(review): no 'break' here; unreachable since all inner cases return. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6830
6831#undef IEM_SCAS_CASE
6832
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and stores it into the 8-bit register given by
 * @a iFixedReg (a X86_GREG_xXX index, already OR'ed with REX.B by callers).
 *
 * @param   iFixedReg   The 8-bit destination register index.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6846
6847
/**
 * @opcode 0xb0
 * 'mov AL,Ib' - with REX.B the destination becomes R8L (hence the uRexB OR).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
6856
6857
/**
 * @opcode 0xb1
 * 'mov CL,Ib' - with REX.B the destination becomes R9L (hence the uRexB OR).
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
6866
6867
/**
 * @opcode 0xb2
 * 'mov DL,Ib' - with REX.B the destination becomes R10L (hence the uRexB OR).
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
6876
6877
/**
 * @opcode 0xb3
 * 'mov BL,Ib' - with REX.B the destination becomes R11L (hence the uRexB OR).
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOp_BL_Ib == NULL ? iemOpCommonMov_r8_Ib : iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
6886
6887
/**
 * @opcode 0xb4
 * 'mov AH,Ib' - register index xSP is passed because without a REX prefix the
 * 8-bit encoding 4 selects AH, while with REX it selects SPL/R12L; the
 * GREG_U8 accessors presumably resolve which one applies - see their impl.
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
6896
6897
/**
 * @opcode 0xb5
 * 'mov CH,Ib' - index xBP: encoding 5 is CH without REX, BPL/R13L with REX
 * (resolved by the GREG_U8 accessors).
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
6906
6907
/**
 * @opcode 0xb6
 * 'mov DH,Ib' - index xSI: encoding 6 is DH without REX, SIL/R14L with REX
 * (resolved by the GREG_U8 accessors).
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
6916
6917
/**
 * @opcode 0xb7
 * 'mov BH,Ib' - index xDI: encoding 7 is BH without REX, DIL/R15L with REX
 * (resolved by the GREG_U8 accessors).
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
6926
6927
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate matching the effective operand size (16/32/64-bit;
 * the 64-bit form takes a full 8-byte immediate) and stores it into the
 * general register given by @a iFixedReg (already OR'ed with REX.B by the
 * callers).  The 32-bit store zero-extends per the usual x86-64 rules
 * (handled inside IEM_MC_STORE_GREG_U32).
 *
 * @param   iFixedReg   The destination general register index.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6972
6973
/**
 * @opcode 0xb8
 * 'mov rAX,Iv' - with REX.B the destination becomes R8.
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
6982
6983
/**
 * @opcode 0xb9
 * 'mov rCX,Iv' - with REX.B the destination becomes R9.
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
6992
6993
/**
 * @opcode 0xba
 * 'mov rDX,Iv' - with REX.B the destination becomes R10.
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7002
7003
/**
 * @opcode 0xbb
 * 'mov rBX,Iv' - with REX.B the destination becomes R11.
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7012
7013
/**
 * @opcode 0xbc
 * 'mov rSP,Iv' - with REX.B the destination becomes R12.
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7022
7023
7024/**
7025 * @opcode 0xbd
7026 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    /* REX.B (uRexB = 0 or 8) selects r13 instead of rBP. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7032
7033
7034/**
7035 * @opcode 0xbe
7036 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    /* REX.B (uRexB = 0 or 8) selects r14 instead of rSI. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7042
7043
7044/**
7045 * @opcode 0xbf
7046 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    /* REX.B (uRexB = 0 or 8) selects r15 instead of rDI. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
7052
7053
7054/**
7055 * @opcode 0xc0
7056 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    /*
     * Group 2 shift/rotate of a byte r/m operand by an 8-bit immediate
     * count (186+).  The ModR/M reg field selects the operation; /6 is
     * not assigned and raises #UD.
     */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are declared undefined for the testcase verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The 3rd argument (1) tells the effective address calculation that
           one immediate byte still follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7114
7115
7116/**
7117 * @opcode 0xc1
7118 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    /*
     * Group 2 shift/rotate of a word/dword/qword r/m operand by an 8-bit
     * immediate count (186+).  The ModR/M reg field selects the operation;
     * /6 is not assigned and raises #UD.
     */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are declared undefined for the testcase verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper qword half. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte follows the ModR/M bytes (3rd arg = 1). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7259
7260
7261/**
7262 * @opcode 0xc2
7263 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    /*
     * Near return, popping Iw extra bytes off the stack afterwards.
     * Defers to a per-operand-size C implementation; flagged as an
     * indirect branch for the recompiler.
     */
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    /* In 64-bit mode the operand size defaults to 64-bit (and Intel CPUs
       ignore the operand size prefix here). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7281
7282
7283/**
7284 * @opcode 0xc3
7285 */
FNIEMOP_DEF(iemOp_retn)
{
    /*
     * Plain near return.  Defers to a per-operand-size C implementation;
     * flagged as an indirect branch for the recompiler.
     */
    IEMOP_MNEMONIC(retn, "retn");
    /* In 64-bit mode the operand size defaults to 64-bit (and Intel CPUs
       ignore the operand size prefix here). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7302
7303
7304/**
7305 * @opcode 0xc4
7306 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       (Note: the comment previously said LDS; opcode 0xc4 is LES/VEX3,
       0xc5 is LDS/VEX2.)
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W only has REX.W meaning in 64-bit mode. */
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* The VEX payload bits are stored inverted; un-invert and
               extract the REX-style register extensions (0 or 8). */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* VEX.mmmmm selects the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7373
7374
7375/**
7376 * @opcode 0xc5
7377 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       (Note: the comment previously said LES; opcode 0xc5 is LDS/VEX2,
       0xc4 is LES/VEX3.)
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* The VEX payload bits are stored inverted; un-invert them. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* The 2-byte VEX form implies the 0x0f opcode map (map 1). */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
7418
7419
7420/**
7421 * @opcode 0xc6
7422 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    /*
     * Group 11: mov Eb,Ib.  Only /0 (MOV) is defined; all other reg-field
     * values raise #UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* One immediate byte follows the ModR/M bytes (3rd arg = 1). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7453
7454
7455/**
7456 * @opcode 0xc7
7457 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    /*
     * Group 11: mov Ev,Iz.  Only /0 (MOV) is defined; all other reg-field
     * values raise #UD.  In 64-bit mode the immediate is a sign-extended
     * 32-bit value.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz: 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Two immediate bytes follow the ModR/M bytes (3rd arg = 2). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still only 4 immediate bytes in 64-bit mode (sign-extended). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7542
7543
7544
7545
7546/**
7547 * @opcode 0xc8
7548 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    /*
     * ENTER cbFrame,u8NestingLevel (186+): sets up a stack frame.
     * The heavy lifting is deferred to iemCImpl_enter.
     */
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
7559
7560
7561/**
7562 * @opcode 0xc9
7563 */
FNIEMOP_DEF(iemOp_leave)
{
    /*
     * LEAVE (186+): tears down the stack frame set up by ENTER.
     * Deferred to iemCImpl_leave.
     */
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
7572
7573
7574/**
7575 * @opcode 0xca
7576 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    /*
     * Far return, popping Iw extra bytes afterwards.  A far branch can
     * change CS and thus the execution mode, hence IEM_CIMPL_F_MODE.
     */
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
7585
7586
7587/**
7588 * @opcode 0xcb
7589 */
FNIEMOP_DEF(iemOp_retf)
{
    /*
     * Plain far return; same worker as retf Iw with a zero pop count.
     */
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
7597
7598
7599/**
7600 * @opcode 0xcc
7601 */
FNIEMOP_DEF(iemOp_int3)
{
    /*
     * INT3: raise a #BP breakpoint trap via the common interrupt worker.
     */
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
7610
7611
7612/**
7613 * @opcode 0xcd
7614 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    /*
     * INT n: software interrupt with the vector given by the immediate byte.
     */
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, u8Int, IEMINT_INTN);
}
7624
7625
7626/**
7627 * @opcode 0xce
7628 */
FNIEMOP_DEF(iemOp_into)
{
    /*
     * INTO: raise #OF if the overflow flag is set.  Invalid in 64-bit mode
     * (IEMOP_HLP_NO_64BIT); conditional, hence the extra BRANCH_CONDITIONAL.
     */
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
}
7637
7638
7639/**
7640 * @opcode 0xcf
7641 */
FNIEMOP_DEF(iemOp_iret)
{
    /*
     * IRET: interrupt return.  Checks for pending IRQs before executing
     * (IEM_CIMPL_F_CHECK_IRQ_BEFORE) since it may re-enable interrupts.
     */
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
7650
7651
7652/**
7653 * @opcode 0xd0
7654 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    /*
     * Group 2 shift/rotate of a byte r/m operand by an implicit count of 1.
     * The ModR/M reg field selects the operation; /6 raises #UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are declared undefined for the testcase verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* No immediate follows (3rd arg = 0). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7708
7709
7710
7711/**
7712 * @opcode 0xd1
7713 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    /*
     * Group 2 shift/rotate of a word/dword/qword r/m operand by an implicit
     * count of 1.  The ModR/M reg field selects the operation; /6 raises #UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are declared undefined for the testcase verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper qword half. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* No immediate follows (3rd arg = 0). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7846
7847
7848/**
7849 * @opcode 0xd2
7850 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    /*
     * Group 2 shift/rotate of a byte r/m operand by the count in CL.
     * The ModR/M reg field selects the operation; /6 raises #UD.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are declared undefined for the testcase verification. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        /* The shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* No immediate follows (3rd arg = 0). */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7906
7907
/**
 * @opcode 0xd3
 *
 * Group 2 shift/rotate of a word/dword/qword operand by the count in CL
 * (Ev,CL).  ModRM.reg selects the operation (0=rol, 1=ror, 2=rcl, 3=rcr,
 * 4=shl, 5=shr, 7=sar); /6 is invalid and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these operations for CL counts. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination: one MC block per effective operand size */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero bits 63:32 of the full register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map read-write, call worker, commit + unmap */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,    0);
                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,    0);
                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,    0);
                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8049
/**
 * @opcode 0xd4
 *
 * ASCII adjust AX after multiply, with an explicit (normally 10) base
 * immediate.  Invalid in 64-bit mode; raises \#DE when the immediate is zero
 * since the adjustment divides by it.  Deferred to iemCImpl_aam.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
}
8063
8064
/**
 * @opcode 0xd5
 *
 * ASCII adjust AX before division, with an explicit (normally 10) base
 * immediate.  Invalid in 64-bit mode.  Unlike AAM no \#DE check is needed
 * (the immediate is multiplied, not divided).  Deferred to iemCImpl_aad.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
}
8076
8077
/**
 * @opcode 0xd6
 *
 * SALC - set AL from carry: AL = CF ? 0xff : 0x00.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8096
8097
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup: AL = [iEffSeg : (r/e)BX + zero-extended AL], with the
 * address width (and the variant of the zero-extension and fetch) selected by
 * the effective address mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8148
8149
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises a stack underflow instead of calling the implementation when either
 * register is tagged empty.
 *
 * @param   bRm         Mod R/M byte (rm selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8179
8180
/**
 * Common worker for FPU instructions working on ST0 and STn, and only
 * affecting flags (FSW) - no result value is stored.
 *
 * @param   bRm         Mod R/M byte (rm selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to mark for the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8210
8211
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         Mod R/M byte (rm selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to mark for the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8241
8242
/** Opcode 0xd8 11/0.  fadd st0,stN: ST(0) += ST(n), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8249
8250
/** Opcode 0xd8 11/1.  fmul st0,stN: ST(0) *= ST(n), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8257
8258
/** Opcode 0xd8 11/2.  fcom st0,stN: compare ST(0) with ST(n), FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8265
8266
/** Opcode 0xd8 11/3.  fcomp st0,stN: same compare worker as fcom, but pops. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8273
8274
/** Opcode 0xd8 11/4.  fsub st0,stN: ST(0) -= ST(n), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8281
8282
/** Opcode 0xd8 11/5.  fsubr st0,stN: reversed subtract, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8289
8290
/** Opcode 0xd8 11/6.  fdiv st0,stN: ST(0) /= ST(n), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8297
8298
/** Opcode 0xd8 11/7.  fdivr st0,stN: reversed divide, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8305
8306
/**
 * Common worker for FPU instructions working on ST0 and an m32r (32-bit real
 * memory operand), and storing the result in ST0.
 *
 * The memory operand is fetched before the FPU-usage check; stack underflow
 * is raised when ST0 is empty.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8342
8343
/** Opcode 0xd8 !11/0.  fadd st0,m32r: ST(0) += 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8350
8351
/** Opcode 0xd8 !11/1.  fmul st0,m32r: ST(0) *= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8358
8359
/** Opcode 0xd8 !11/2.
 * fcom st0,m32r: compare ST(0) with a 32-bit real from memory; updates FSW
 * only (memory operand recorded in FPU DP/DS on update/underflow). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8391
8392
/** Opcode 0xd8 !11/3.
 * fcomp st0,m32r: same as fcom st0,m32r but pops the FPU stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8424
8425
/** Opcode 0xd8 !11/4.  fsub st0,m32r: ST(0) -= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
8432
8433
/** Opcode 0xd8 !11/5.  fsubr st0,m32r: reversed subtract with memory operand. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
8440
8441
/** Opcode 0xd8 !11/6.  fdiv st0,m32r: ST(0) /= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
8448
8449
/** Opcode 0xd8 !11/7.  fdivr st0,m32r: reversed divide with memory operand. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8456
8457
/**
 * @opcode 0xd8
 *
 * FPU escape D8: records the FPU opcode word (ModRM + low 3 bits of the
 * escape byte), then dispatches on ModRM.reg, with separate tables for the
 * register (11b) and memory forms.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8497
8498
/** Opcode 0xd9 /0 mem32real
 * fld m32r: converts a 32-bit real from memory to 80-bit and pushes it onto
 * the FPU stack; raises a stack overflow when ST7 (the incoming slot) is in
 * use.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 is the register that becomes ST0 after the push; it must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8529
8530
/** Opcode 0xd9 !11/2 mem32real
 * fst m32r: stores ST(0) converted to 32-bit real into memory.  On stack
 * underflow a negative QNaN is written instead, but only when the invalid
 * operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the conversion produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8564
8565
/** Opcode 0xd9 !11/3
 * fstp m32r: like fst m32r, but pops the FPU stack after the store. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the conversion produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow: write a negative QNaN only if \#IA is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8599
8600
/** Opcode 0xd9 !11/4
 * fldenv m14/28byte: loads the FPU environment from memory; the layout
 * (14 vs 28 bytes) depends on the effective operand size, handled by
 * iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
8617
8618
8619/** Opcode 0xd9 !11/5 */
8620FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
8621{
8622 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
8623 IEM_MC_BEGIN(1, 1);
8624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8625 IEM_MC_ARG(uint16_t, u16Fsw, 0);
8626 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8627 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8628 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8629 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8630 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8631 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
8632 IEM_MC_END();
8633}
8634
8635
/** Opcode 0xd9 !11/6
 * fnstenv m14/m28byte: stores the FPU environment (no-wait form) to memory;
 * layout depends on the effective operand size, handled by iemCImpl_fnstenv. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
8652
8653
/** Opcode 0xd9 !11/7
 * fnstcw m2byte: stores the FPU control word (no-wait form) to a 16-bit
 * memory operand. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8670
8671
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - FPU no-operation; still checks FPU availability/exceptions and
 * updates the recorded FPU opcode and instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8687
8688
/** Opcode 0xd9 11/0 stN
 * fld stN: pushes a copy of ST(n) onto the FPU stack; raises a push
 * underflow when the source register is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8713
8714
/** Opcode 0xd9 11/3 stN
 * fxch stN: exchanges ST(0) and ST(n).  The empty-register case is deferred
 * to iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t,          uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: ST(n)'s old value (with C1 set) goes to ST(0) via FpuRes,
           ST(0)'s old value is written directly into ST(n). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8743
8744
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * fstp st0,stN: copies ST(0) to ST(n) and pops.  The iDstReg==0 special case
 * (fstp st0,st0) degenerates to a pop and is handled without copying. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8791
8792
/**
 * Common worker for FPU instructions working on ST0 and replacing it with the
 * result, i.e. unary operators.
 *
 * Raises a stack underflow when ST0 is empty.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8820
8821
/** Opcode 0xd9 0xe0.  fchs st0: negates the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
8828
8829
8830/** Opcode 0xd9 0xe1. */
8831FNIEMOP_DEF(iemOp_fabs)
8832{
8833 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
8834 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
8835}
8836
8837
/** Opcode 0xd9 0xe4.
 * FTST - compare ST0 against +0.0, only updating the FSW condition flags
 * (no stack modification, no value stored). */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to mark free on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8861
8862
/** Opcode 0xd9 0xe5.
 * FXAM - classify the value in ST0 into the FSW condition flags.
 *
 * Note that unlike the other ST0 workers there is no NOT_EMPTY guard here:
 * the register is referenced unconditionally since FXAM also classifies an
 * empty register (it reports the empty class rather than underflowing). */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8883
8884
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Checks that the register which becomes the new top of stack (ST7 relative
 * to the current top) is free; if not, a stack push overflow is recorded
 * instead of calling the constant-loading worker.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8910
8911
/** Opcode 0xd9 0xe8.
 * FLD1 - push +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.
 * FLDL2T - push log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.
 * FLDL2E - push log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.
 * FLDPI - push pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.
 * FLDLG2 - push log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.
 * FLDLN2 - push ln(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.
 * FLDZ - push +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
8964
8965
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
/* F2XM1 - replace ST0 with 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
8979
8980
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * The destination register index comes from the R/M field of @a bRm; both STn
 * and ST0 must be occupied or a stack underflow is recorded (still popping).
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9011
9012
/** Opcode 0xd9 0xf1.
 * FYL2X - st1 = st1 * log2(st0), then pop.
 * NOTE(review): the stats identifier is fyl2x_st0 while siblings use the
 * _stN_st0 form (e.g. fpatan_st1_st0) - looks like a naming inconsistency
 * only; changing it would rename the statistics counter. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
9019
9020
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * If ST0 is empty a push-underflow is recorded instead of invoking the worker.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode); /* stores both results, pushing one */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9048
9049
/** Opcode 0xd9 0xf2.
 * FPTAN - replace ST0 with its partial tangent and push a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3.
 * FPATAN - st1 = partial arctangent of st1/st0, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.
 * FXTRACT - split ST0 into exponent and significand, pushing one. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.
 * FPREM1 - IEEE partial remainder of st0 / st1, stored in st0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9080
9081
/** Opcode 0xd9 0xf6.
 * FDECSTP - decrement the FPU stack TOP pointer; no data is moved. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode); /* clears C0/C2/C3 as noted above */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9102
9103
/** Opcode 0xd9 0xf7.
 * FINCSTP - increment the FPU stack TOP pointer; no data is moved. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode); /* clears C0/C2/C3 as noted above */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9124
9125
/** Opcode 0xd9 0xf8.
 * FPREM - (truncating) partial remainder of st0 / st1, stored in st0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9.
 * FYL2XP1 - st1 = st1 * log2(st0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa.
 * FSQRT - replace st0 with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb.
 * FSINCOS - replace st0 with sin(st0) and push cos(st0). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc.
 * FRNDINT - round st0 to an integer per the current rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd.
 * FSCALE - st0 = st0 * 2^trunc(st1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe.
 * FSIN - replace st0 with sin(st0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff.
 * FCOS - replace st0 with cos(st0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9188
9189
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register-form encodings 0xe0 thru 0xff (the four
 * upper /reg groups); indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
9226
9227
/**
 * @opcode 0xd9
 *
 * Escape opcode 0xd9 decoder: records the FPU opcode word (low 3 opcode bits
 * in the high byte, ModRM in the low byte) and dispatches on the /reg field,
 * with separate register-form and memory-form tables.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7); /* FOP value for FSTENV & friends */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop); /* only d9 d0 is FNOP; the rest of /2 is invalid */
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* Groups /4 thru /7 are fully table driven (bRm 0xe0..0xff). */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9272
9273
/** Opcode 0xda 11/0.
 * FCMOVB - copy STn to ST0 when CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both STn and ST0 must be occupied; only STn's value is referenced. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if the move is skipped */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9298
9299
/** Opcode 0xda 11/1.
 * FCMOVE - copy STn to ST0 when ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if the move is skipped */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9324
9325
/** Opcode 0xda 11/2.
 * FCMOVBE - copy STn to ST0 when CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if the move is skipped */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9350
9351
/** Opcode 0xda 11/3.
 * FCMOVU - copy STn to ST0 when PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode); /* FOP/FPUIP updated even if the move is skipped */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9376
9377
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done.
 *
 * Both registers must be occupied; otherwise a stack underflow is recorded
 * (still popping twice).
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9407
9408
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare of ST0 with ST1, then pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9415
9416
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * The 32-bit signed integer operand is fetched from memory before the FPU
 * availability of ST0 is tested, so memory faults are raised first.
 *
 * @param bRm Mod R/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9452
9453
/** Opcode 0xda !11/0.
 * FIADD m32i - st0 = st0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1.
 * FIMUL m32i - st0 = st0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
9468
9469
/** Opcode 0xda !11/2.
 * FICOM m32i - compare st0 with a 32-bit signed integer from memory,
 * updating only the FSW condition flags. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* MEM_OP variant also records FPUDP/FPUDS for the memory operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9501
9502
/** Opcode 0xda !11/3.
 * FICOMP m32i - like FICOM m32i, but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9534
9535
/** Opcode 0xda !11/4.
 * FISUB m32i - st0 = st0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5.
 * FISUBR m32i - st0 = m32i - st0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6.
 * FIDIV m32i - st0 = st0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7.
 * FIDIVR m32i - st0 = m32i / st0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
9566
9567
/**
 * @opcode 0xda
 *
 * Escape opcode 0xda decoder: register forms are FCMOVcc (and FUCOMPP at
 * 0xe9), memory forms are the m32i integer arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7); /* FOP value for FSTENV & friends */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp); /* only da e9 is valid in /5 */
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9609
9610
/** Opcode 0xdb !11/0.
 * FILD m32i - load a 32-bit signed integer from memory, convert it to
 * extended precision and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (the new top after pushing) must be free, else push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9641
9642
/** Opcode 0xdb !11/1.
 * FISTTP m32i - store st0 to memory as a 32-bit signed integer using
 * truncation, then pop (SSE3 instruction). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if the invalid-op exception is masked, store the integer
           indefinite value; either way record the underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9676
9677
/** Opcode 0xdb !11/2.
 * FIST m32i - store st0 to memory as a 32-bit signed integer using the
 * current rounding mode; the stack is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: store integer indefinite if #IA is masked, flag underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9711
9712
/** Opcode 0xdb !11/3.
 * FISTP m32i - like FIST m32i, but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: store integer indefinite if #IA is masked, flag underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9746
9747
/** Opcode 0xdb !11/5.
 * FLD m80r - push an 80-bit real from memory onto the FPU stack
 * (no format conversion, loaded verbatim). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (the new top after pushing) must be free, else push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9778
9779
/** Opcode 0xdb !11/7.
 * FSTP m80r - store st0 to memory as an 80-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* NOTE(review): 7 is passed as cbAlign for the 10-byte mapping - presumably
       an alignment mask/limit for the _EX variant; confirm against the macro. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: store negative QNaN (real indefinite) if #IA is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9813
9814
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i) - copies ST(i) into ST(0) when CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9839
9840
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i) - copies ST(i) into ST(0) when ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9865
9866
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i) - copies ST(i) into ST(0) when both CF and ZF are
 * clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9891
9892
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i) - copies ST(i) into ST(0) when PF is clear (not
 * unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9917
9918
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 "enable interrupts"; ignored (no-op) on later FPUs.  Only the
 * device-not-available check and RIP advance are performed. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 "disable interrupts"; ignored (no-op) on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9941
9942
/** Opcode 0xdb 0xe2.
 * FNCLEX - clears the FPU exception flags (FSW) without checking for pending
 * exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9955
9956
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitializes the FPU; deferred to iemCImpl_finit with exception
 * checking disabled (the no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
9964
9965
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode"; ignored (no-op) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "reset protected mode".  Raises \#UD here since newer
 * CPUs do so; the ignore-it variant is kept disabled below. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
9993
9994
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i) - unordered compare setting EFLAGS; deferred to
 * iemCImpl_fcomi_fucomi.  The last argument packs fPop (bit 0, here zero,
 * i.e. no pop) together with the FPU opcode. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10003
10004
10005/** Opcode 0xdb 11/6. */
10006FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
10007{
10008 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
10009 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10010 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
10011 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10012}
10013
10014
10015/**
10016 * @opcode 0xdb
10017 */
10018FNIEMOP_DEF(iemOp_EscF3)
10019{
10020 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10021 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
10022 if (IEM_IS_MODRM_REG_MODE(bRm))
10023 {
10024 switch (IEM_GET_MODRM_REG_8(bRm))
10025 {
10026 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
10027 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
10028 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
10029 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
10030 case 4:
10031 switch (bRm)
10032 {
10033 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
10034 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
10035 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
10036 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
10037 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
10038 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
10039 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
10040 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
10041 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10042 }
10043 break;
10044 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
10045 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
10046 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10048 }
10049 }
10050 else
10051 {
10052 switch (IEM_GET_MODRM_REG_8(bRm))
10053 {
10054 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
10055 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
10056 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
10057 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
10058 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10059 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
10060 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10061 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
10062 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10063 }
10064 }
10065}
10066
10067
10068/**
10069 * Common worker for FPU instructions working on STn and ST0, and storing the
10070 * result in STn unless IE, DE or ZE was raised.
10071 *
10072 * @param bRm Mod R/M byte.
10073 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10074 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(i) is the first operand and destination, ST(0) the second operand;
       both must be valid or it's a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10098
10099
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - result stored in ST(i); see iemOpHlpFpu_stN_st0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reverse subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reverse divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10146
10147
10148/**
10149 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10150 * memory operand, and storing the result in ST0.
10151 *
10152 * @param bRm Mod R/M byte.
10153 * @param pfnImpl Pointer to the instruction implementation (assembly).
10154 */
10155FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10156{
10157 IEM_MC_BEGIN(3, 3);
10158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10159 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10160 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10161 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10162 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10163 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10164
10165 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10167 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10168 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10169
10170 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10171 IEM_MC_PREPARE_FPU_USAGE();
10172 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10173 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10174 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10175 } IEM_MC_ELSE() {
10176 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10177 } IEM_MC_ENDIF();
10178 IEM_MC_ADVANCE_RIP_AND_FINISH();
10179
10180 IEM_MC_END();
10181}
10182
10183
/** Opcode 0xdc !11/0.
 * FADD m64real - ST(0) += m64; see iemOpHlpFpu_ST0_m64r. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1.
 * FMUL m64real - ST(0) *= m64. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10198
10199
/** Opcode 0xdc !11/2.
 * FCOM m64real - compares ST(0) with a 64-bit real memory operand, updating
 * FSW only (no register result, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10231
10232
/** Opcode 0xdc !11/3.
 * FCOMP m64real - same as FCOM m64real but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10264
10265
/** Opcode 0xdc !11/4.
 * FSUB m64real - ST(0) -= m64; see iemOpHlpFpu_ST0_m64r. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5.
 * FSUBR m64real - ST(0) = m64 - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6.
 * FDIV m64real - ST(0) /= m64. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7.
 * FDIVR m64real - ST(0) = m64 / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10296
10297
10298/**
10299 * @opcode 0xdc
10300 */
10301FNIEMOP_DEF(iemOp_EscF4)
10302{
10303 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10304 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10305 if (IEM_IS_MODRM_REG_MODE(bRm))
10306 {
10307 switch (IEM_GET_MODRM_REG_8(bRm))
10308 {
10309 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10310 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10311 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10312 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10313 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10314 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10315 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10316 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10317 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10318 }
10319 }
10320 else
10321 {
10322 switch (IEM_GET_MODRM_REG_8(bRm))
10323 {
10324 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10325 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10326 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10327 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10328 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10329 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10330 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10331 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10333 }
10334 }
10335}
10336
10337
/** Opcode 0xdd !11/0.
 * FLD m64real - fetches a 64-bit real from memory, converts it to 80-bit and
 * pushes it onto the FPU register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The register that will receive the pushed value (relative reg 7) must be
       free, otherwise this is a stack overflow condition. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10368
10369
/** Opcode 0xdd !11/1 (the dispatcher calls this for reg=1, not reg=0).
 * FISTTP m64int - stores ST(0) to memory as a truncated 64-bit integer and
 * pops the stack.  On underflow with masked IE, the integer indefinite value
 * (INT64_MIN) is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10403
10404
/** Opcode 0xdd !11/2 (the dispatcher calls this for reg=2, not reg=0).
 * FST m64real - stores ST(0) to memory as a 64-bit real, no pop.  On
 * underflow with masked IE, a negative QNaN is stored. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10438
10439
10440
10441
/** Opcode 0xdd !11/3 (the dispatcher calls this for reg=3, not reg=0).
 * FSTP m64real - same as FST m64real but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10475
10476
/** Opcode 0xdd !11/4 (the dispatcher calls this for reg=4, not reg=0).
 * FRSTOR m94/108byte - restores the full FPU state from memory; deferred to
 * iemCImpl_frstor (layout depends on effective operand size). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10493
10494
/** Opcode 0xdd !11/6 (the dispatcher calls this for reg=6, not reg=0).
 * FNSAVE m94/108byte - saves the full FPU state to memory; deferred to
 * iemCImpl_fnsave. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10511
/** Opcode 0xdd !11/7 (the dispatcher calls this for reg=7, not reg=0).
 * FNSTSW m16 - stores the FPU status word to memory. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
10535
10536
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - marks the register as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10556
10557
/** Opcode 0xdd 11/2 (mnemonic below says 11/1 family; reg=2 in the
 * dispatcher).  FST ST(i) - copies ST(0) into ST(i). */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(0) must be valid; the value is wrapped in a result with FSW=0 so the
       normal result-store path can be reused. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10580
10581
/** Opcode 0xdd 11/3 (dispatcher reg=4).
 * FUCOM ST(0),ST(i) - unordered compare, FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/4 (dispatcher reg=5).
 * FUCOMP ST(0),ST(i) - unordered compare, FSW only, pops ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
10596
10597
10598/**
10599 * @opcode 0xdd
10600 */
10601FNIEMOP_DEF(iemOp_EscF5)
10602{
10603 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10604 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
10605 if (IEM_IS_MODRM_REG_MODE(bRm))
10606 {
10607 switch (IEM_GET_MODRM_REG_8(bRm))
10608 {
10609 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
10610 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
10611 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
10612 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
10613 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
10614 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
10615 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10616 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10618 }
10619 }
10620 else
10621 {
10622 switch (IEM_GET_MODRM_REG_8(bRm))
10623 {
10624 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
10625 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
10626 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
10627 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
10628 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
10629 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
10630 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
10631 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
10632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10633 }
10634 }
10635}
10636
10637
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xde 0xd9.
 * FCOMPP - compares ST(0) with ST(1), then pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
10692
10693
10694/**
10695 * Common worker for FPU instructions working on ST0 and an m16i, and storing
10696 * the result in ST0.
10697 *
10698 * @param bRm Mod R/M byte.
10699 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10700 */
10701FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
10702{
10703 IEM_MC_BEGIN(3, 3);
10704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10705 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10706 IEM_MC_LOCAL(int16_t, i16Val2);
10707 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10708 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10709 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
10710
10711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10713
10714 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10715 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10716 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10717
10718 IEM_MC_PREPARE_FPU_USAGE();
10719 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10720 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
10721 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10722 } IEM_MC_ELSE() {
10723 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10724 } IEM_MC_ENDIF();
10725 IEM_MC_ADVANCE_RIP_AND_FINISH();
10726
10727 IEM_MC_END();
10728}
10729
10730
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    /* ST(0) <- ST(0) + m16i, via the common st0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
10737
10738
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    /* ST(0) <- ST(0) * m16i, via the common st0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
10745
10746
/** Opcode 0xde !11/2.
 * Compares ST(0) with the 16-bit integer operand; only FSW is updated, no
 * result is stored and nothing is popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): record stack underflow (no register to flag -> UINT8_MAX). */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10778
10779
/** Opcode 0xde !11/3.
 * Same as FICOM m16i (compare ST(0) with the memory integer, FSW only), but
 * pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): record stack underflow and still pop. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10811
10812
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    /* ST(0) <- ST(0) - m16i, via the common st0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
10819
10820
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    /* Reverse subtract: ST(0) <- m16i - ST(0), via the common st0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
10827
10828
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* ST(0) <- ST(0) / m16i, via the common st0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
10835
10836
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* Reverse divide: ST(0) <- m16i / ST(0), via the common st0/m16i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
10843
10844
/**
 * @opcode 0xde
 *
 * FPU escape 0xde: dispatches on the ModRM byte.  Register forms are the
 * pop-after arithmetic instructions (FADDP, FMULP, ..., FCOMPP); memory
 * forms operate on a 16-bit signed integer operand (FIADD m16i etc.).
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch the FPU opcode (low 3 opcode bits + ModRM) for later FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN,      bRm); /* Reserved encoding; treated as FCOMP ST(i). */
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET(); /* Only 0xd9 (FCOMPP) is valid in /3. */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10885
10886
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Mark ST(i) empty, then increment the stack top (the "pop" half). */
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10906
10907
/** Opcode 0xdf 0xe0.
 * Copies the FPU status word into AX without checking for pending FPU
 * exceptions (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10922
10923
/** Opcode 0xdf 11/5.
 * Compare ST(0) with ST(i), set EFLAGS, pop.
 * NOTE(review): uses the same iemAImpl_fcomi_r80_by_r80 worker as FCOMIP —
 * presumably the unordered/ordered distinction is handled elsewhere or deemed
 * equivalent here; confirm against the AIMPL implementation. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10932
10933
/** Opcode 0xdf 11/6.
 * Compare ST(0) with ST(i), set EFLAGS, pop.  Bit 31 of the last argument
 * requests the pop from the shared fcomi/fucomi C implementation. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10942
10943
/** Opcode 0xdf !11/0.
 * Load the 16-bit signed integer, convert to r80 and push it on the stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int16_t,               i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,  i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to the current top) becomes ST(0) after the push;
       if it is occupied this is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10974
10975
/** Opcode 0xdf !11/1.
 * Store ST(0) to m16i with truncation (round toward zero) and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* NOTE(review): destination mapped before IEM_MC_PREPARE_FPU_USAGE,
       presumably so memory faults are raised before FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if #IA is masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11009
11010
/** Opcode 0xdf !11/2.
 * Store ST(0) to m16i (rounded per FCW.RC); the stack is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* NOTE(review): destination mapped before IEM_MC_PREPARE_FPU_USAGE,
       presumably so memory faults are raised before FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if #IA is masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11044
11045
/** Opcode 0xdf !11/3.
 * Same as FIST m16i (rounded per FCW.RC), but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int16_t *,               pi16Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* NOTE(review): destination mapped before IEM_MC_PREPARE_FPU_USAGE,
       presumably so memory faults are raised before FPU state changes. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if #IA is masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11079
11080
/** Opcode 0xdf !11/4.
 * Load an 80-bit packed BCD value (RTPBCD80U), convert to r80 and push. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,             d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,   pd80Val,    d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to top) becomes ST(0) after the push; occupied -> push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11111
11112
/** Opcode 0xdf !11/5.
 * Load the 64-bit signed integer, convert to r80 and push it on the stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int64_t,               i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,  i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to top) becomes ST(0) after the push; occupied -> push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11143
11144
/** Opcode 0xdf !11/6.
 * Store ST(0) as an 80-bit packed BCD value (RTPBCD80U) and pop. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U,              pd80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte destination needs the extended map variant; alignment mask 7. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if #IA is masked, store the BCD indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11178
11179
/** Opcode 0xdf !11/7.
 * Store ST(0) to m64i (rounded per FCW.RC) and pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* NOTE(review): destination mapped before IEM_MC_PREPARE_FPU_USAGE,
       presumably so memory faults are raised before FPU state changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if #IA is masked, store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11213
11214
/**
 * @opcode 0xdf
 *
 * FPU escape 0xdf: register forms are FFREEP/FNSTSW AX/FUCOMIP/FCOMIP (plus
 * reserved aliases); memory forms load/store 16- and 64-bit integers and
 * 80-bit packed BCD.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Latch the FPU opcode (low 3 opcode bits + ModRM) for later FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET(); /* Only 0xe0 (FNSTSW AX) is valid in /4. */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11255
11256
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement xCX (width per effective address size) and
 * take the short branch if the counter is non-zero AND ZF is clear.  The
 * decrement itself does not modify EFLAGS.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects which of CX/ECX/RCX is the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11307
11308
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrement xCX (width per effective address size) and take
 * the short branch if the counter is non-zero AND ZF is set.  The decrement
 * itself does not modify EFLAGS.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects which of CX/ECX/RCX is the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11359
11360
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement xCX (width per effective address size) and take the
 * short branch while the counter is non-zero.  EFLAGS are not modified.
 * Includes a logging-only shortcut for tight self-branching LOOP $-2 delay
 * loops (see comment below).
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        /* Shortcut: zero the counter and fall through instead of spinning. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* The address-size prefix selects which of CX/ECX/RCX is the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11451
11452
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the short branch when the counter register
 * (width per effective address size) is zero; no registers or flags are
 * modified.  Note the inverted branch arms compared to LOOP.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11500
11501
/** Opcode 0xe4
 * IN AL,Ib - byte input from the immediate port; deferred to the C
 * implementation and flagged as I/O / possible VM-exit for the recompiler. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Third arg: 0x80 marks the port as coming from an immediate, low bits carry the address mode. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11511
11512
/** Opcode 0xe5
 * IN eAX,Ib - word/dword input from the immediate port, access size chosen
 * by the effective operand size. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11523
11524
/** Opcode 0xe6
 * OUT Ib,AL - byte output to the immediate port; deferred to the C
 * implementation and flagged as I/O / possible VM-exit for the recompiler. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Third arg: 0x80 marks the port as coming from an immediate, low bits carry the address mode. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11534
11535
/** Opcode 0xe7
 * OUT Ib,eAX - word/dword output to the immediate port, access size chosen
 * by the effective operand size. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11546
11547
/**
 * @opcode 0xe8
 *
 * CALL rel16/rel32: near relative call, deferred to the operand-size
 * specific C implementations.  In 64-bit mode the displacement is a
 * sign-extended rel32 (no 64-bit immediate form exists).
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11578
11579
/**
 * @opcode 0xe9
 *
 * JMP rel16/rel32: near relative jump.  The 32-bit and 64-bit operand sizes
 * share the rel32 decoding path (64-bit mode has no rel64 form).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11613
11614
/**
 * @opcode 0xea
 *
 * JMP ptr16:16/ptr16:32 - direct far jump with an immediate selector:offset
 * pair.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Far branches can change mode and rflags and may cause a VM-exit. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
11635
11636
11637/**
11638 * @opcode 0xeb
11639 */
11640FNIEMOP_DEF(iemOp_jmp_Jb)
11641{
11642 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
11643 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11644 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11645
11646 IEM_MC_BEGIN(0, 0);
11647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11648 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11649 IEM_MC_END();
11650}
11651
11652
/** Opcode 0xec - in AL,DX: read one byte from the I/O port in DX into AL.
 *  Deferred to the common eAX,DX C worker with access width 1; annotated as
 *  an I/O instruction that may cause a VM exit. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11661
11662
/** Opcode 0xed - in eAX,DX: read a word or dword from the I/O port in DX.
 *  The access width (2 or 4 bytes) follows the effective operand size. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11672
11673
/** Opcode 0xee - out DX,AL: write the byte in AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11682
11683
/** Opcode 0xef - out DX,eAX: write a word or dword from eAX to the I/O port
 *  in DX.  The access width (2 or 4 bytes) follows the effective operand
 *  size. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11693
11694
11695/**
11696 * @opcode 0xf0
11697 */
11698FNIEMOP_DEF(iemOp_lock)
11699{
11700 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
11701 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11702 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
11703
11704 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
11705 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
11706}
11707
11708
11709/**
11710 * @opcode 0xf1
11711 */
11712FNIEMOP_DEF(iemOp_int1)
11713{
11714 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
11715 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
11716 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
11717 * LOADALL memo. Needs some testing. */
11718 IEMOP_HLP_MIN_386();
11719 /** @todo testcase! */
11720 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
11721 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
11722 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
11723}
11724
11725
11726/**
11727 * @opcode 0xf2
11728 */
11729FNIEMOP_DEF(iemOp_repne)
11730{
11731 /* This overrides any previous REPE prefix. */
11732 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
11733 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
11734 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
11735
11736 /* For the 4 entry opcode tables, REPNZ overrides any previous
11737 REPZ and operand size prefixes. */
11738 pVCpu->iem.s.idxPrefix = 3;
11739
11740 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
11741 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
11742}
11743
11744
11745/**
11746 * @opcode 0xf3
11747 */
11748FNIEMOP_DEF(iemOp_repe)
11749{
11750 /* This overrides any previous REPNE prefix. */
11751 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
11752 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
11753 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
11754
11755 /* For the 4 entry opcode tables, REPNZ overrides any previous
11756 REPNZ and operand size prefixes. */
11757 pVCpu->iem.s.idxPrefix = 2;
11758
11759 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
11760 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
11761}
11762
11763
11764/**
11765 * @opcode 0xf4
11766 */
11767FNIEMOP_DEF(iemOp_hlt)
11768{
11769 IEMOP_MNEMONIC(hlt, "hlt");
11770 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11771 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
11772}
11773
11774
11775/**
11776 * @opcode 0xf5
11777 */
11778FNIEMOP_DEF(iemOp_cmc)
11779{
11780 IEMOP_MNEMONIC(cmc, "cmc");
11781 IEM_MC_BEGIN(0, 0);
11782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11783 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
11784 IEM_MC_ADVANCE_RIP_AND_FINISH();
11785 IEM_MC_END();
11786}
11787
11788
11789/**
11790 * Body for of 'inc/dec/not/neg Eb'.
11791 */
11792#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
11793 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
11794 { \
11795 /* register access */ \
11796 IEM_MC_BEGIN(2, 0); \
11797 IEMOP_HLP_DONE_DECODING(); \
11798 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11799 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
11800 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
11801 IEM_MC_REF_EFLAGS(pEFlags); \
11802 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
11803 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11804 IEM_MC_END(); \
11805 } \
11806 else \
11807 { \
11808 /* memory access. */ \
11809 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
11810 { \
11811 IEM_MC_BEGIN(2, 2); \
11812 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11813 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
11814 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11815 \
11816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
11817 IEMOP_HLP_DONE_DECODING(); \
11818 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
11819 IEM_MC_FETCH_EFLAGS(EFlags); \
11820 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
11821 \
11822 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
11823 IEM_MC_COMMIT_EFLAGS(EFlags); \
11824 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11825 IEM_MC_END(); \
11826 } \
11827 else \
11828 { \
11829 IEM_MC_BEGIN(2, 2); \
11830 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11831 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
11832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11833 \
11834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
11835 IEMOP_HLP_DONE_DECODING(); \
11836 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
11837 IEM_MC_FETCH_EFLAGS(EFlags); \
11838 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
11839 \
11840 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
11841 IEM_MC_COMMIT_EFLAGS(EFlags); \
11842 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11843 IEM_MC_END(); \
11844 } \
11845 } \
11846 (void)0
11847
11848
11849/**
11850 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
11851 */
11852#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
11853 if (IEM_IS_MODRM_REG_MODE(bRm)) \
11854 { \
11855 /* \
11856 * Register target \
11857 */ \
11858 switch (pVCpu->iem.s.enmEffOpSize) \
11859 { \
11860 case IEMMODE_16BIT: \
11861 IEM_MC_BEGIN(2, 0); \
11862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11863 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11864 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
11865 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11866 IEM_MC_REF_EFLAGS(pEFlags); \
11867 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
11868 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11869 IEM_MC_END(); \
11870 break; \
11871 \
11872 case IEMMODE_32BIT: \
11873 IEM_MC_BEGIN(2, 0); \
11874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11875 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11876 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
11877 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11878 IEM_MC_REF_EFLAGS(pEFlags); \
11879 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
11880 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
11881 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11882 IEM_MC_END(); \
11883 break; \
11884 \
11885 case IEMMODE_64BIT: \
11886 IEM_MC_BEGIN(2, 0); \
11887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11888 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11889 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
11890 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
11891 IEM_MC_REF_EFLAGS(pEFlags); \
11892 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
11893 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11894 IEM_MC_END(); \
11895 break; \
11896 \
11897 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11898 } \
11899 } \
11900 else \
11901 { \
11902 /* \
11903 * Memory target. \
11904 */ \
11905 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
11906 { \
11907 switch (pVCpu->iem.s.enmEffOpSize) \
11908 { \
11909 case IEMMODE_16BIT: \
11910 IEM_MC_BEGIN(2, 2); \
11911 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11912 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
11913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11914 \
11915 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11917 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
11918 IEM_MC_FETCH_EFLAGS(EFlags); \
11919 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
11920 \
11921 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
11922 IEM_MC_COMMIT_EFLAGS(EFlags); \
11923 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11924 IEM_MC_END(); \
11925 break; \
11926 \
11927 case IEMMODE_32BIT: \
11928 IEM_MC_BEGIN(2, 2); \
11929 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11930 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
11931 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11932 \
11933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11935 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
11936 IEM_MC_FETCH_EFLAGS(EFlags); \
11937 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
11938 \
11939 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
11940 IEM_MC_COMMIT_EFLAGS(EFlags); \
11941 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11942 IEM_MC_END(); \
11943 break; \
11944 \
11945 case IEMMODE_64BIT: \
11946 IEM_MC_BEGIN(2, 2); \
11947 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11948 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
11949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11950 \
11951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
11953 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
11954 IEM_MC_FETCH_EFLAGS(EFlags); \
11955 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
11956 \
11957 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
11958 IEM_MC_COMMIT_EFLAGS(EFlags); \
11959 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11960 IEM_MC_END(); \
11961 break; \
11962 \
11963 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11964 } \
11965 } \
11966 else \
11967 { \
11968 (void)0
11969
/**
 * Body for 'inc/dec/not/neg Ev' - LOCK-prefixed memory part.
 *
 * Companion of IEMOP_BODY_UNARY_Ev; it supplies the locked memory path and
 * closes the braces the former left open, so the two must always be used
 * as a pair (in that order).
 *
 * @param   a_fnLockedU16   16-bit locked worker.
 * @param   a_fnLockedU32   32-bit locked worker.
 * @param   a_fnLockedU64   64-bit locked worker.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
12032
12033
12034/**
12035 * @opmaps grp3_f6
12036 * @opcode /0
12037 * @todo also /1
12038 */
12039FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
12040{
12041 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
12042 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12043
12044 if (IEM_IS_MODRM_REG_MODE(bRm))
12045 {
12046 /* register access */
12047 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12048 IEM_MC_BEGIN(3, 0);
12049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12050 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12051 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
12052 IEM_MC_ARG(uint32_t *, pEFlags, 2);
12053 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12054 IEM_MC_REF_EFLAGS(pEFlags);
12055 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
12056 IEM_MC_ADVANCE_RIP_AND_FINISH();
12057 IEM_MC_END();
12058 }
12059 else
12060 {
12061 /* memory access. */
12062 IEM_MC_BEGIN(3, 2);
12063 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
12064 IEM_MC_ARG(uint8_t, u8Src, 1);
12065 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
12066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12067
12068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
12069 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
12070 IEM_MC_ASSIGN(u8Src, u8Imm);
12071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12072 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12073 IEM_MC_FETCH_EFLAGS(EFlags);
12074 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
12075
12076 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
12077 IEM_MC_COMMIT_EFLAGS(EFlags);
12078 IEM_MC_ADVANCE_RIP_AND_FINISH();
12079 IEM_MC_END();
12080 }
12081}
12082
12083
/** Opcode 0xf6 /4, /5, /6 and /7 - common body for mul/imul/div/idiv Eb.
 *
 * The byte-sized multiply/divide workers operate on AX and return non-zero
 * when a divide error must be raised (\#DE); RIP is only advanced on
 * success.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The byte-sized worker (mul/imul/div/idiv).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,  0);
        IEM_MC_ARG(uint8_t,         u8Value, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,  0);
        IEM_MC_ARG(uint8_t,         u8Value, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12134
12135
/** Opcode 0xf7 /4, /5, /6 and /7 - common body for mul/imul/div/idiv Ev.
 *
 * The size-specific workers operate on the xAX/xDX register pair and return
 * non-zero when a divide error (\#DE) must be raised; RIP is only advanced
 * on success.  For the 32-bit register forms, the high halves of RAX/RDX
 * are explicitly cleared on success only.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table with the 16/32/64-bit workers (pfnU16/pfnU32/pfnU64).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,   0);
                IEM_MC_ARG(uint16_t *,      pu16DX,   1);
                IEM_MC_ARG(uint16_t,        u16Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,   0);
                IEM_MC_ARG(uint32_t *,      pu32DX,   1);
                IEM_MC_ARG(uint32_t,        u32Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Only clear the high dwords on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,   0);
                IEM_MC_ARG(uint64_t *,      pu64DX,   1);
                IEM_MC_ARG(uint64_t,        u64Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,   0);
                IEM_MC_ARG(uint16_t *,      pu16DX,   1);
                IEM_MC_ARG(uint16_t,        u16Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,   0);
                IEM_MC_ARG(uint32_t *,      pu32DX,   1);
                IEM_MC_ARG(uint32_t,        u32Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Only clear the high dwords on success. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,   0);
                IEM_MC_ARG(uint64_t *,      pu64DX,   1);
                IEM_MC_ARG(uint64_t,        u64Value, 2);
                IEM_MC_ARG(uint32_t *,      pEFlags,  3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12318
12319
12320/**
12321 * @opmaps grp3_f6
12322 * @opcode /2
12323 */
12324FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
12325{
12326 IEMOP_MNEMONIC(not_Eb, "not Eb");
12327 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
12328}
12329
12330
12331/**
12332 * @opmaps grp3_f6
12333 * @opcode /3
12334 */
12335FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12336{
12337 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12338 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12339}
12340
12341
12342/**
12343 * @opcode 0xf6
12344 */
12345FNIEMOP_DEF(iemOp_Grp3_Eb)
12346{
12347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12348 switch (IEM_GET_MODRM_REG_8(bRm))
12349 {
12350 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12351 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12352 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
12353 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
12354 case 4:
12355 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
12356 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12357 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
12358 case 5:
12359 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
12360 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12361 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
12362 case 6:
12363 IEMOP_MNEMONIC(div_Eb, "div Eb");
12364 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12365 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
12366 case 7:
12367 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
12368 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12369 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
12370 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12371 }
12372}
12373
12374
/** Opcode 0xf7 /0 - test Ev,Iv: AND the operand with an immediate, setting
 *  flags only (the destination is never written back, hence the read-only
 *  memory mapping and no high-dword clearing in the 32-bit register case).
 *  In 64-bit mode the immediate is a sign-extended 32-bit value. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* imm32 (sign-extended) follows */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12516
12517
/** Opcode 0xf7 /2 - not Ev: one's complement of the 16/32/64-bit operand.
 *  Uses the paired unary Ev bodies (non-locked + locked). */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
12525
12526
/** Opcode 0xf7 /3 - neg Ev: two's complement negation of the 16/32/64-bit
 *  operand.  Uses the paired unary Ev bodies (non-locked + locked). */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
12534
12535
12536/**
12537 * @opcode 0xf7
12538 */
12539FNIEMOP_DEF(iemOp_Grp3_Ev)
12540{
12541 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12542 switch (IEM_GET_MODRM_REG_8(bRm))
12543 {
12544 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
12545 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
12546 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
12547 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
12548 case 4:
12549 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
12550 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12551 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
12552 case 5:
12553 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
12554 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12555 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
12556 case 6:
12557 IEMOP_MNEMONIC(div_Ev, "div Ev");
12558 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12559 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
12560 case 7:
12561 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
12562 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12563 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
12564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12565 }
12566}
12567
12568
12569/**
12570 * @opcode 0xf8
12571 */
12572FNIEMOP_DEF(iemOp_clc)
12573{
12574 IEMOP_MNEMONIC(clc, "clc");
12575 IEM_MC_BEGIN(0, 0);
12576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12577 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
12578 IEM_MC_ADVANCE_RIP_AND_FINISH();
12579 IEM_MC_END();
12580}
12581
12582
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag (CF) in EFLAGS.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0);                       /* no arguments, no locals */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* a LOCK prefix raises #UD */
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12595
12596
/**
 * @opcode 0xfa
 *
 * CLI - deferred to the C implementation since it touches RFLAGS (IF), may
 * cause a VM exit, and pending IRQs must be sampled before the change
 * (IEM_CIMPL_F_CHECK_IRQ_BEFORE).
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, iemCImpl_cli);
}
12606
12607
/**
 * @opcode 0xfb
 *
 * STI - deferred to the C implementation since it touches RFLAGS (IF), may
 * cause a VM exit, and pending IRQs are checked after the instruction
 * completes (IEM_CIMPL_F_CHECK_IRQ_AFTER).
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
12614
12615
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag (DF) in EFLAGS.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0);                       /* no arguments, no locals */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* a LOCK prefix raises #UD */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12628
12629
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag (DF) in EFLAGS.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0);                       /* no arguments, no locals */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* a LOCK prefix raises #UD */
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12642
12643
/**
 * @opmaps grp4
 * @opcode /0
 *
 * INC Eb - byte increment of a register or memory operand; the shared
 * IEMOP_BODY_UNARY_Eb body handles both forms and the LOCK-prefixed variant.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
12653
12654
/**
 * @opmaps grp4
 * @opcode /1
 *
 * DEC Eb - byte decrement of a register or memory operand; the shared
 * IEMOP_BODY_UNARY_Eb body handles both forms and the LOCK-prefixed variant.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
12664
12665
12666/**
12667 * @opcode 0xfe
12668 */
12669FNIEMOP_DEF(iemOp_Grp4)
12670{
12671 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12672 switch (IEM_GET_MODRM_REG_8(bRm))
12673 {
12674 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
12675 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
12676 default:
12677 /** @todo is the eff-addr decoded? */
12678 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
12679 IEMOP_RAISE_INVALID_OPCODE_RET();
12680 }
12681}
12682
/**
 * Opcode 0xff /0.
 *
 * INC Ev - word/dword/qword increment; the shared IEMOP_BODY_UNARY_Ev bodies
 * select the width, the second one emits the LOCK-prefixed memory variant.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
12690
12691
/**
 * Opcode 0xff /1.
 *
 * DEC Ev - word/dword/qword decrement; the shared IEMOP_BODY_UNARY_Ev bodies
 * select the width, the second one emits the LOCK-prefixed memory variant.
 *
 * @param   bRm     The ModR/M byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
12699
12700
/**
 * Opcode 0xff /2.
 *
 * CALL near, absolute indirect - the new RIP is read from a register or from
 * memory and the call is carried out by iemCImpl_call_16/32/64 according to
 * the effective operand size.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12787
/**
 * Common body for the far call/jump through memory forms of group 5
 * (0xff /3 callf Ep and 0xff /5 jmpf Ep): loads a far pointer (16-bit
 * selector + 16/32/64-bit offset) from memory and defers to @a a_fnCImpl
 * (iemCImpl_callf / iemCImpl_FarJmp).
 *
 * Register operands are invalid and raise an invalid-opcode exception.
 * In 64-bit mode the default operand size is forced to 32-bit unless the
 * guest CPU is Intel, which honours a REX.W prefix here.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); /* selector follows the offset */ \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); /* selector follows the offset */ \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); /* only reachable for Intel, see the REX.W handling above */ \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); /* selector follows the offset */ \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
12855
12856
/**
 * Opcode 0xff /3.
 *
 * CALLF Ep - far call through a far pointer in memory; the heavy lifting is
 * done by IEMOP_BODY_GRP5_FAR_EP with iemCImpl_callf.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
}
12866
12867
/**
 * Opcode 0xff /4.
 *
 * JMP near, absolute indirect - the new RIP is read from a register or from
 * memory and installed via IEM_MC_SET_RIP_U16/U32/U64_AND_FINISH according to
 * the effective operand size.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12954
12955
/**
 * Opcode 0xff /5.
 *
 * JMPF Ep - far jump through a far pointer in memory; the heavy lifting is
 * done by IEMOP_BODY_GRP5_FAR_EP with iemCImpl_FarJmp.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
}
12965
12966
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev - register operands go through the common push-GReg worker; memory
 * operands are fetched here and pushed at the effective operand size.
 *
 * @param   bRm     The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13022
13023
13024/**
13025 * @opcode 0xff
13026 */
13027FNIEMOP_DEF(iemOp_Grp5)
13028{
13029 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13030 switch (IEM_GET_MODRM_REG_8(bRm))
13031 {
13032 case 0:
13033 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
13034 case 1:
13035 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
13036 case 2:
13037 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
13038 case 3:
13039 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
13040 case 4:
13041 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
13042 case 5:
13043 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
13044 case 6:
13045 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
13046 case 7:
13047 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
13048 IEMOP_RAISE_INVALID_OPCODE_RET();
13049 }
13050 AssertFailedReturn(VERR_IEM_IPE_3);
13051}
13052
13053
13054
/**
 * The one-byte opcode decoder map.
 *
 * Indexed by the opcode byte (0x00..0xff); each entry is the decoder/emulator
 * function for that instruction, prefix byte, group, or escape byte.
 * Declared extern at the top of the file so the other decoder maps can chain
 * back to it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
13122
13123
13124/** @} */
13125
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette