VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 100733

Last change on this file since 100733 was 100733, checked in by vboxsync, 16 months ago

VMM/IEM,ValKit: Shortened the IEMAllInstruction* file names to IEMAllInst*. This makes it easier to see the distinguishing bits of the name in the narrow buffer list widget here (this has been driving me nuts for months). Sorry for any conflicts this causes. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 456.1 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 100733 2023-07-28 22:51:16Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * NOTE!  This macro is deliberately left "open": it ends inside an un-closed
 *        else-branch (the LOCK prefixed memory case) ending with a bare
 *        (void)0.  The companion _NO_LOCK/_LOCKED macro supplies that branch
 *        body and the two closing braces, so the pair must always be used
 *        back-to-back in the same function.
 *
 * @param   a_fnNormalU8    Worker for the non-locked case; called with the
 *                          destination pointer, source byte and EFLAGS pointer.
 * @param   a_fRW           IEM_ACCESS_XXX flags for mapping the memory
 *                          destination (e.g. IEM_ACCESS_DATA_RW).
 */
#define IEMOP_BODY_BINARY_rm_r8(a_fnNormalU8, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
117
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8 when the instruction does not permit a
 * LOCK prefix: rejects the decoded LOCK prefix and closes the two braces the
 * body macro left open.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
124
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8 when the instruction supports a LOCK
 * prefix: emits the locked memory path (always mapped IEM_ACCESS_DATA_RW) and
 * closes the two braces the body macro left open.
 *
 * @param   a_fnLockedU8    Worker for the locked case; same signature as the
 *                          normal worker.
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
146
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * Self-contained (unlike the _rm_r8 body): the destination is always a
 * register, so no locked memory path exists and the LOCK prefix is rejected
 * in both branches via IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX.
 *
 * @param   a_fnNormalU8    Worker called with destination pointer, source byte
 *                          and EFLAGS pointer.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory.  The source is a plain fetch here since only \
         * the register destination is written. \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
195
196
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Used with IEMOP_BODY_BINARY_rm_rv_NO_LOCK or IEMOP_BODY_BINARY_rm_rv_LOCKED.
 *
 * NOTE!  Like IEMOP_BODY_BINARY_rm_r8 this macro is deliberately left "open"
 *        inside the LOCK prefixed else-branch; the companion _NO_LOCK/_LOCKED
 *        macro closes it, so the pair must be used back-to-back.
 *
 * @param   a_fnNormalU16   16-bit operand size worker (non-locked).
 * @param   a_fnNormalU32   32-bit operand size worker (non-locked).
 * @param   a_fnNormalU64   64-bit operand size worker (non-locked).
 * @param   a_fRW           IEM_ACCESS_XXX flags for mapping the memory
 *                          destination; also used to detect the non-writing
 *                          TEST/CMP case for the high-dword clearing.
 */
#define IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* Only clear the high dword when the destination is actually \
                   written, i.e. not for TEST and CMP (read-only a_fRW). */ \
                if ((a_fRW) == IEM_ACCESS_DATA_RW) /* not TEST and CMP */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
341
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv when the instruction does not permit a
 * LOCK prefix: rejects the decoded LOCK prefix and closes the two braces the
 * body macro left open.
 */
#define IEMOP_BODY_BINARY_rm_rv_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
348
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv when the instruction supports a LOCK
 * prefix: emits the locked memory path for all three operand sizes (always
 * mapped IEM_ACCESS_DATA_RW) and closes the braces the body macro left open.
 *
 * @param   a_fnLockedU16   16-bit operand size locked worker.
 * @param   a_fnLockedU32   32-bit operand size locked worker.
 * @param   a_fnLockedU64   64-bit operand size locked worker.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
417
418
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Note the missing trailing ';' after IEM_MC_END(): the invoking statement
 * supplies it (the macro is used as "IEMOP_BODY_BINARY_AL_Ib(worker);").
 *
 * @param   a_fnNormalU8    Worker called with AL reference, immediate byte and
 *                          EFLAGS pointer.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
438
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * NOTE(review): the switch cases carry no 'break' - presumably
 * IEM_MC_ADVANCE_RIP_AND_FINISH / IEM_MC_END return from the function, making
 * fall-through unreachable; confirm against the IEM_MC_* definitions.
 *
 * @param   a_fnNormalU16       16-bit operand size worker.
 * @param   a_fnNormalU32       32-bit operand size worker.
 * @param   a_fnNormalU64       64-bit operand size worker (the immediate is
 *                              sign-extended from 32 bits, see
 *                              IEM_OPCODE_GET_NEXT_S32_SX_U64).
 * @param   a_fModifiesDstReg   Non-zero when the instruction writes the
 *                              destination register, i.e. not TEST/CMP; gates
 *                              the 64-bit high-dword clearing for 32-bit ops.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
505
506
507
508/* Instruction specification format - work in progress: */
509
510/**
511 * @opcode 0x00
512 * @opmnemonic add
513 * @op1 rm:Eb
514 * @op2 reg:Gb
515 * @opmaps one
516 * @openc ModR/M
517 * @opflmodify cf,pf,af,zf,sf,of
518 * @ophints harmless ignores_op_sizes
519 * @opstats add_Eb_Gb
520 * @opgroup og_gen_arith_bin
521 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
522 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
523 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
524 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
525 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The two macros pair up: _rm_r8 leaves the LOCK branch open, _LOCKED closes it. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
532
533
534/**
535 * @opcode 0x01
536 * @opgroup og_gen_arith_bin
537 * @opflmodify cf,pf,af,zf,sf,of
538 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
539 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
540 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
541 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
542 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The two macros pair up: _rm_rv leaves the LOCK branch open, _LOCKED closes it. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
549
550
551/**
552 * @opcode 0x02
553 * @opgroup og_gen_arith_bin
554 * @opflmodify cf,pf,af,zf,sf,of
555 * @opcopytests iemOp_add_Eb_Gb
556 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination: no locked variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
562
563
564/**
565 * @opcode 0x03
566 * @opgroup og_gen_arith_bin
567 * @opflmodify cf,pf,af,zf,sf,of
568 * @opcopytests iemOp_add_Ev_Gv
569 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* _rv_rm is defined elsewhere in this file; the trailing 1 presumably
       flags "modifies destination register" as in IEMOP_BODY_BINARY_rAX_Iz. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
575
576
577/**
578 * @opcode 0x04
579 * @opgroup og_gen_arith_bin
580 * @opflmodify cf,pf,af,zf,sf,of
581 * @opcopytests iemOp_add_Eb_Gb
582 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);   /* macro supplies no trailing ';' */
}
588
589
590/**
591 * @opcode 0x05
592 * @opgroup og_gen_arith_bin
593 * @opflmodify cf,pf,af,zf,sf,of
594 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
595 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
596 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
597 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
598 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Last arg = a_fModifiesDstReg: ADD writes rAX, so clear high dword for 32-bit ops. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
604
605
606/**
607 * @opcode 0x06
608 * @opgroup og_stack_sreg
609 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* opcode is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
616
617
618/**
619 * @opcode 0x07
620 * @opgroup og_stack_sreg
621 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* opcode is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defer to C implementation; IEM_CIMPL_F_MODE because loading ES can change mode context. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
629
630
631/**
632 * @opcode 0x08
633 * @opgroup og_gen_arith_bin
634 * @opflmodify cf,pf,af,zf,sf,of
635 * @opflundef af
636 * @opflclear of,cf
637 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
638 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
639 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
640 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
641 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
649
650
/**
652 * @opcode 0x09
653 * @opgroup og_gen_arith_bin
654 * @opflmodify cf,pf,af,zf,sf,of
655 * @opflundef af
656 * @opflclear of,cf
657 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
658 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
659 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
660 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
661 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
662 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
663 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
671
672
673/**
674 * @opcode 0x0a
675 * @opgroup og_gen_arith_bin
676 * @opflmodify cf,pf,af,zf,sf,of
677 * @opflundef af
678 * @opflclear of,cf
679 * @opcopytests iemOp_or_Eb_Gb
680 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
687
688
689/**
690 * @opcode 0x0b
691 * @opgroup og_gen_arith_bin
692 * @opflmodify cf,pf,af,zf,sf,of
693 * @opflundef af
694 * @opflclear of,cf
695 * @opcopytests iemOp_or_Ev_Gv
696 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
703
704
705/**
706 * @opcode 0x0c
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 * @opflundef af
710 * @opflclear of,cf
711 * @opcopytests iemOp_or_Eb_Gb
712 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
719
720
721/**
722 * @opcode 0x0d
723 * @opgroup og_gen_arith_bin
724 * @opflmodify cf,pf,af,zf,sf,of
725 * @opflundef af
726 * @opflclear of,cf
727 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
728 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
729 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
730 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
731 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
732 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
733 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
734 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);    /* AF is undefined after OR */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
741
742
743/**
744 * @opcode 0x0e
745 * @opgroup og_stack_sreg
746 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();   /* opcode is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
753
754
755/**
756 * @opcode 0x0f
757 * @opmnemonic EscTwo0f
758 * @openc two0f
759 * @opdisenum OP_2B_ESC
760 * @ophints harmless
761 * @opgroup og_escapes
762 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* Sanity check the table the first time around.  Spot-checks the 0x0f 0xbc
       row: BSF for the no-prefix/0x66/REPNE columns, TZCNT for the REPE one. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        /* Dispatch on the next opcode byte; 4 table entries per opcode, one
           per prefix column (see idxPrefix). */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
798
799/**
800 * @opcode 0x10
801 * @opgroup og_gen_arith_bin
802 * @opfltest cf
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
805 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
806 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
808 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
809 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The two macros pair up: _rm_r8 leaves the LOCK branch open, _LOCKED closes it. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
816
817
818/**
819 * @opcode 0x11
820 * @opgroup og_gen_arith_bin
821 * @opfltest cf
822 * @opflmodify cf,pf,af,zf,sf,of
823 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
824 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
825 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
826 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
827 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
828 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The two macros pair up: _rm_rv leaves the LOCK branch open, _LOCKED closes it. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
835
836
837/**
838 * @opcode 0x12
839 * @opgroup og_gen_arith_bin
840 * @opfltest cf
841 * @opflmodify cf,pf,af,zf,sf,of
842 * @opcopytests iemOp_adc_Eb_Gb
843 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination: no locked variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
849
850
851/**
852 * @opcode 0x13
853 * @opgroup og_gen_arith_bin
854 * @opfltest cf
855 * @opflmodify cf,pf,af,zf,sf,of
856 * @opcopytests iemOp_adc_Ev_Gv
857 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
863
864
865/**
866 * @opcode 0x14
867 * @opgroup og_gen_arith_bin
868 * @opfltest cf
869 * @opflmodify cf,pf,af,zf,sf,of
870 * @opcopytests iemOp_adc_Eb_Gb
871 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
877
878
879/**
880 * @opcode 0x15
881 * @opgroup og_gen_arith_bin
882 * @opfltest cf
883 * @opflmodify cf,pf,af,zf,sf,of
884 * @opcopytests iemOp_adc_Ev_Gv
885 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
891
892
893/**
894 * @opcode 0x16
895 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();   /* opcode is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
902
903
904/**
905 * @opcode 0x17
 * @opgroup og_stack_sreg
909 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();   /* opcode is invalid in 64-bit mode */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
917
918
919/**
920 * @opcode 0x18
921 * @opgroup og_gen_arith_bin
922 * @opfltest cf
923 * @opflmodify cf,pf,af,zf,sf,of
924 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The two macros pair up: _rm_r8 leaves the LOCK branch open, _LOCKED closes it. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
931
932
933/**
934 * @opcode 0x19
935 * @opgroup og_gen_arith_bin
936 * @opfltest cf
937 * @opflmodify cf,pf,af,zf,sf,of
938 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The two macros pair up: _rm_rv leaves the LOCK branch open, _LOCKED closes it. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
945
946
947/**
948 * @opcode 0x1a
949 * @opgroup og_gen_arith_bin
950 * @opfltest cf
951 * @opflmodify cf,pf,af,zf,sf,of
952 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination: no locked variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
958
959
960/**
961 * @opcode 0x1b
962 * @opgroup og_gen_arith_bin
963 * @opfltest cf
964 * @opflmodify cf,pf,af,zf,sf,of
965 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
971
972
/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 *
 * SBB AL, imm8 - fixed AL destination with byte immediate.
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
984
985
/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 *
 * SBB rAX, imm16/32 - accumulator form; immediate width follows the
 * effective operand size (handled by the body macro).
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
997
998
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 *
 * PUSH DS - invalid in 64-bit mode; defers to the common sreg-push helper.
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1009
1010
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 *
 * POP DS - invalid in 64-bit mode; deferred to iemCImpl_pop_Sreg since a
 * segment load may change execution mode (IEM_CIMPL_F_MODE).
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1022
1023
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * AND r/m8, r8 - AF is left undefined by hardware, so the verifier is told
 * to ignore it; LOCK-prefixed variant included.
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1038
1039
/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * AND r/m16/32/64, r16/32/64 - AF undefined; LOCK-prefixed variants included.
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1054
1055
/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * AND r8, r/m8 - register destination form; AF undefined.
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1069
1070
/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * AND r16/32/64, r/m16/32/64 - register destination form; AF undefined.
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1084
1085
/**
 * @opcode 0x24
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * AND AL, imm8 - fixed AL destination; AF undefined.
 */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
}
1099
1100
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * AND rAX, imm16/32 - accumulator form; immediate width follows the
 * effective operand size; AF undefined.
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1114
1115
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 *
 * ES segment-override prefix: records the override in the decoder state and
 * dispatches the next opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    /* Fetch and decode the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1134
1135
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 *
 * DAA - decimal adjust AL after addition; invalid in 64-bit mode.
 * Deferred to a CIMPL worker; OF is undefined per the flag annotations.
 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1150
1151
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 *
 * SUB r/m8, r8 - plain RMW body plus LOCK-prefixed variant.
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1163
1164
/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 *
 * SUB r/m16/32/64, r16/32/64 - plain RMW bodies plus LOCK-prefixed variants.
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1176
1177
/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 *
 * SUB r8, r/m8 - register destination form.
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1188
1189
/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 *
 * SUB r16/32/64, r/m16/32/64 - register destination form.
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1200
1201
/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 *
 * SUB AL, imm8 - fixed AL destination with byte immediate.
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1212
1213
/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 *
 * SUB rAX, imm16/32 - accumulator form; immediate width follows the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1224
1225
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 *
 * CS segment-override prefix: records the override and dispatches the next
 * opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    /* Fetch and decode the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1244
1245
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 *
 * DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
 * Deferred to a CIMPL worker; OF is undefined per the flag annotations.
 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1260
1261
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * XOR r/m8, r8 - AF undefined; LOCK-prefixed variant included.
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1276
1277
/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * XOR r/m16/32/64, r16/32/64 - AF undefined; LOCK-prefixed variants included.
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1292
1293
/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * XOR r8, r/m8 - register destination form; AF undefined.
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1307
1308
/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * XOR r16/32/64, r/m16/32/64 - register destination form; AF undefined.
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1322
1323
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 *
 * XOR AL, imm8 - fixed AL destination; AF undefined.
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1337
1338
1339/**
1340 * @opcode 0x35
1341 * @opgroup og_gen_arith_bin
1342 * @opflmodify cf,pf,af,zf,sf,of
1343 * @opflundef af
1344 * @opflclear of,cf
1345 */
1346FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1347{
1348 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1349 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1350 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1351}
1352
1353
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 *
 * SS segment-override prefix: records the override and dispatches the next
 * opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    /* Fetch and decode the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1372
1373
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 *
 * AAA - ASCII adjust AL after addition; invalid in 64-bit mode.  The @optest
 * pairs document Intel vs AMD differences in the undefined-flag results.
 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1421
1422
/**
 * @opcode 0x38
 *
 * CMP r/m8, r8 - read-only compare; LOCK prefix explicitly rejected.
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1432
1433
/**
 * @opcode 0x39
 *
 * CMP r/m16/32/64, r16/32/64 - read-only compare; LOCK prefix rejected.
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
1443
1444
/**
 * @opcode 0x3a
 *
 * CMP r8, r/m8 - register first-operand compare.
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1453
1454
/**
 * @opcode 0x3b
 *
 * CMP r16/32/64, r/m16/32/64 - register first-operand compare.
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1463
1464
/**
 * @opcode 0x3c
 *
 * CMP AL, imm8 - fixed AL compare with byte immediate.
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1473
1474
/**
 * @opcode 0x3d
 *
 * CMP rAX, imm16/32 - accumulator compare; immediate width follows the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1483
1484
/**
 * @opcode 0x3e
 *
 * DS segment-override prefix: records the override and dispatches the next
 * opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    /* Fetch and decode the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1497
1498
1499/**
1500 * @opcode 0x3f
1501 * @opfltest af,cf
1502 * @opflmodify cf,pf,af,zf,sf,of
1503 * @opflundef pf,zf,sf,of
1504 * @opgroup og_gen_arith_dec
1505 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1506 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1507 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1508 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1509 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1510 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1511 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1512 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1513 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1514 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1515 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1516 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1517 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1518 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1519 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1520 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1521 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1522 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1523 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1524 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1525 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1526 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1527 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1528 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1529 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1530 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1531 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1532 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1533 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1534 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1535 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1536 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1537 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1538 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1539 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1540 */
1541FNIEMOP_DEF(iemOp_aas)
1542{
1543 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1544 IEMOP_HLP_NO_64BIT();
1545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1546 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1547
1548 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1549}
1550
1551
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes.  Hence only
 * 16-bit and 32-bit operand sizes are handled; the 32-bit case clears the
 * high dword of the 64-bit register via IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF.
 * The AIMPL worker receives a pointer to the register plus EFLAGS.
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1587
/**
 * @opcode 0x40
 *
 * INC eAX in 16/32-bit code; the plain REX prefix byte in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Restart decoding with the byte after the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1608
1609
/**
 * @opcode 0x41
 *
 * INC eCX in 16/32-bit code; the REX.B prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* bit 3 extension for base/rm register numbers */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1631
1632
/**
 * @opcode 0x42
 *
 * INC eDX in 16/32-bit code; the REX.X prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* bit 3 extension for the SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1654
1655
1656
/**
 * @opcode 0x43
 *
 * INC eBX in 16/32-bit code; the REX.BX prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1679
1680
/**
 * @opcode 0x44
 *
 * INC eSP in 16/32-bit code; the REX.R prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* bit 3 extension for the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1702
1703
/**
 * @opcode 0x45
 *
 * INC eBP in 16/32-bit code; the REX.RB prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1726
1727
/**
 * @opcode 0x46
 *
 * INC eSI in 16/32-bit code; the REX.RX prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1750
1751
/**
 * @opcode 0x47
 *
 * INC eDI in 16/32-bit code; the REX.RBX prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1775
1776
/**
 * @opcode 0x48
 *
 * DEC eAX in 16/32-bit code; the REX.W prefix in 64-bit mode (requires an
 * effective operand size recalculation).
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
1798
1799
/**
 * @opcode 0x49
 *
 * DEC eCX in 16/32-bit code; the REX.BW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
1822
1823
/**
 * @opcode 0x4a
 *
 * DEC eDX in 16/32-bit code; the REX.XW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
1846
1847
/**
 * @opcode 0x4b
 *
 * DEC eBX in 16/32-bit code; the REX.BXW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
1871
1872
/**
 * @opcode 0x4c
 *
 * DEC eSP in 16/32-bit code; the REX.RW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
1895
1896
/**
 * @opcode 0x4d
 *
 * DEC eBP in 16/32-bit code; the REX.RBW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
1920
1921
/**
 * @opcode 0x4e
 *
 * DEC eSI in 16/32-bit code; the REX.RXW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
1945
1946
/**
 * @opcode 0x4f
 *
 * DEC eDI in 16/32-bit code; the full REX.RBXW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
1971
1972
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register number is extended with REX.B, the default
 * operand size becomes 64-bit, and the 66h prefix selects 16-bit instead of
 * 32-bit (there is no 32-bit PUSH reg in long mode - see the op-size
 * selection below).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value and push it at the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2020
2021
/**
 * @opcode 0x50
 *
 * PUSH rAX - defers to the common push-register helper.
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2030
2031
/**
 * @opcode 0x51
 *
 * PUSH rCX - defers to the common push-register helper.
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2040
2041
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2050
2051
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2060
2061
/**
 * @opcode 0x54
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* The 8086/8088 pushes the decremented SP value (SP - 2), unlike
           later CPUs which push the original value (common worker below). */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2081
2082
/**
 * @opcode 0x55
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2091
2092
/**
 * @opcode 0x56
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2101
2102
/**
 * @opcode 0x57
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2111
2112
/**
 * Common 'pop register' helper.
 *
 * Pops the top of the stack into the given general register.  In 64-bit mode
 * the register index is extended with REX.B and the default operand size is
 * forced to 64-bit (0x66 selects 16-bit; no 32-bit path in that case).
 *
 * @param   iReg    The general register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;     /* REX.B extends the index to r8-r15. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop directly into the destination register via a reference. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2161
2162
/**
 * @opcode 0x58
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2171
2172
/**
 * @opcode 0x59
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2181
2182
/**
 * @opcode 0x5a
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2191
2192
/**
 * @opcode 0x5b
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2201
2202
/**
 * @opcode 0x5c
 *
 * POP SP/ESP/RSP needs special treatment: the value popped must not be
 * clobbered by the stack-pointer increment, so it is popped into a local
 * first and stored into xSP afterwards.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* With REX.B the destination is r12 (4 | 8), not RSP, so the common
           worker handles it like any other register. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2256
2257
/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2266
2267
/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2276
2277
/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* The common worker applies REX.B and the effective operand size. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2286
2287
/**
 * @opcode 0x60
 *
 * PUSHA/PUSHAD - invalid in 64-bit mode; defers to a C implementation
 * selected by the effective operand size (16-bit or 32-bit only).
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
}
2301
2302
/**
 * @opcode 0x61
 *
 * POPA/POPAD in legacy/compatibility modes.  In 64-bit mode 0x61 is the
 * MVEX prefix (Knights Corner), which is not supported and raises \#UD.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* POPA modifies EFLAGS-relevant state tracking, hence IEM_CIMPL_F_RFLAGS. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS, iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS, iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2322
2323
2324/**
2325 * @opcode 0x62
2326 * @opmnemonic bound
2327 * @op1 Gv_RO
2328 * @op2 Ma
2329 * @opmincpu 80186
2330 * @ophints harmless x86_invalid_64
2331 * @optest op1=0 op2=0 ->
2332 * @optest op1=1 op2=0 -> value.xcpt=5
2333 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2334 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2335 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2336 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2337 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2338 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2339 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2340 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2341 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2342 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2343 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2344 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2345 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2346 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2347 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2348 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2349 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2350 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2351 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2352 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2353 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2354 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2355 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2356 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2357 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2358 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2359 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2360 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2361 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2362 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2363 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2364 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2365 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2366 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2367 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2368 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2369 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2370 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2371 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2372 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2373 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2374 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2375 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND.  The memory operand holds the
               lower and upper bound back-to-back; the CIMPL worker raises
               \#BR if the index is outside [lower, upper]. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix path: consume the remaining two prefix payload bytes and
       bail out - EVEX decoding is not implemented. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2463
2464
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjusts the RPL field of the destination selector; requires
 * protected mode (286+, no real/V86 mode).  The EFLAGS update (ZF) is done
 * by the iemAImpl_arpl assembly worker. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: the destination selector is mapped read-write since the
           worker may modify its RPL bits in place. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2512
2513
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source into the
             * 64-bit destination.
             */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* The plain move behavior without REX.W (see @note above) is not
           implemented. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2561
2562
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and make FS the effective segment, then continue
       decoding with the next opcode byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2580
2581
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and make GS the effective segment, then continue
       decoding with the next opcode byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2599
2600
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Flip the effective operand size and continue with the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2625
2626
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* Toggle the effective address size: 16 <-> 32 in legacy modes,
       64 -> 32 in long mode (16-bit addressing is not reachable there). */
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2652
2653
/**
 * @opcode 0x68
 *
 * PUSH Iz - push an immediate of the effective operand size.  In 64-bit
 * mode the immediate is 32 bits sign-extended to 64 (see the
 * IEM_OPCODE_GET_NEXT_S32_SX_U64 fetch below).
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2700
2701
/**
 * @opcode 0x69
 *
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 * SF/ZF/AF/PF are undefined after IMUL, hence the verification mask below.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Pick the two-operand worker variant matching the target CPU's
               EFLAGS behavior. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,       pu16Dst,             0);
                IEM_MC_ARG_CONST(uint16_t,   u16Src,/*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *,       pEFlags,             2);
                IEM_MC_LOCAL(uint16_t,       u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; NOTE(review): the 3rd IEM_MC_CALC_RM_EFF_ADDR
                   argument appears to be the number of immediate bytes following
                   the ModR/M encoding (2 here) - confirm against the macro. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,       pu16Dst,             0);
                IEM_MC_ARG(uint16_t,         u16Src,              1);
                IEM_MC_ARG(uint32_t *,       pEFlags,             2);
                IEM_MC_LOCAL(uint16_t,       u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,       pu32Dst,             0);
                IEM_MC_ARG_CONST(uint32_t,   u32Src,/*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *,       pEFlags,             2);
                IEM_MC_LOCAL(uint32_t,       u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,       pu32Dst,             0);
                IEM_MC_ARG(uint32_t,         u32Src,              1);
                IEM_MC_ARG(uint32_t *,       pEFlags,             2);
                IEM_MC_LOCAL(uint32_t,       u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand; the immediate is 32 bits sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,       pu64Dst,             0);
                IEM_MC_ARG_CONST(uint64_t,   u64Src,/*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *,       pEFlags,             2);
                IEM_MC_LOCAL(uint64_t,       u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,       pu64Dst,             0);
                IEM_MC_ARG(uint64_t,         u64Src,              1);
                IEM_MC_ARG(uint32_t *,       pEFlags,             2);
                IEM_MC_LOCAL(uint64_t,       u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2864
2865
/**
 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate (i8Imm is signed, so the
 * widening to the push width sign-extends).
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2902
2903
/**
 * @opcode 0x6b
 *
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate.  SF/ZF/AF/PF are undefined after IMUL, hence the mask below.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Pick the two-operand worker variant matching the target CPU's
               EFLAGS behavior. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand; byte immediate sign-extended to 16 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,       pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,   u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,       pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,       u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; one immediate byte follows the ModR/M. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,       pu16Dst,             0);
                IEM_MC_ARG(uint16_t,         u16Src,              1);
                IEM_MC_ARG(uint32_t *,       pEFlags,             2);
                IEM_MC_LOCAL(uint16_t,       u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,       pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,   u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,       pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,       u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,       pu32Dst,             0);
                IEM_MC_ARG(uint32_t,         u32Src,              1);
                IEM_MC_ARG(uint32_t *,       pEFlags,             2);
                IEM_MC_LOCAL(uint32_t,       u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,       pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,   u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *,       pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,       u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,       pu64Dst,             0);
                IEM_MC_ARG(uint64_t,         u64Src,              1);
                IEM_MC_ARG(uint32_t *,       pEFlags,             2);
                IEM_MC_LOCAL(uint64_t,       u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3066
3067
/**
 * @opcode 0x6c
 *
 * INS Yb,DX - input byte string from port DX.  Requires a 186 or later.
 * All variants are deferred to C implementations and flagged as potential
 * VM-exit points (I/O interception).  With a REP/REPNE prefix the rep worker
 * is used; the address-size mode selects the concrete worker.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* NOTE(review): the trailing 'false' argument is passed straight to the
               C worker - presumably an "I/O permission already checked" flag; confirm
               against the iemCImpl_ins_op8_addr* implementations. */
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3098
3099
/**
 * @opcode 0x6d
 *
 * INS Yv,DX - input word/dword string from port DX.  Requires a 186 or later.
 * Dispatches on effective operand size and then address size; the 64-bit
 * operand-size case falls through to the 32-bit workers (there are only
 * op16/op32 I/O workers).  All variants defer to C implementations and are
 * marked as potential VM-exit points.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size shares the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* expands to a default: label, so it is reachable. */
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size shares the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3162
3163
/**
 * @opcode 0x6e
 *
 * OUTS DX,Yb - output byte string to port DX.  Requires a 186 or later.
 * Unlike INS, the C workers also receive the effective segment (the source
 * string is read from iEffSeg:[e/rSI], which honours segment overrides).
 * All variants defer to C implementations and are potential VM-exit points.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3194
3195
/**
 * @opcode 0x6f
 *
 * OUTS DX,Yv - output word/dword string to port DX.  Requires a 186 or later.
 * Dispatches on effective operand size then address size; the 64-bit
 * operand-size case falls through to the 32-bit workers.  The effective
 * segment is forwarded so segment overrides on the source string are honoured.
 * All variants defer to C implementations and are potential VM-exit points.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size shares the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size shares the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3258
3259
/**
 * @opcode 0x70
 *
 * JO rel8 - jump short if overflow (OF=1).  In 64-bit mode the operand size
 * defaults to 64 bits and Intel CPUs ignore a 66h prefix (see the HLP macro).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3278
3279
/**
 * @opcode 0x71
 *
 * JNO rel8 - jump short if not overflow (OF=0).  Implemented by testing OF
 * and branching on the inverted arm.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3298
/**
 * @opcode 0x72
 *
 * JC/JB/JNAE rel8 - jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3317
3318
/**
 * @opcode 0x73
 *
 * JNC/JNB/JAE rel8 - jump short if not carry (CF=0).  Tests CF and branches
 * on the inverted arm.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3337
3338
/**
 * @opcode 0x74
 *
 * JE/JZ rel8 - jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3357
3358
/**
 * @opcode 0x75
 *
 * JNE/JNZ rel8 - jump short if not equal/not zero (ZF=0).  Tests ZF and
 * branches on the inverted arm.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3377
3378
/**
 * @opcode 0x76
 *
 * JBE/JNA rel8 - jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3397
3398
/**
 * @opcode 0x77
 *
 * JA/JNBE rel8 - jump short if above (CF=0 and ZF=0).  Tests CF|ZF and
 * branches on the inverted arm.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3417
3418
/**
 * @opcode 0x78
 *
 * JS rel8 - jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3437
3438
/**
 * @opcode 0x79
 *
 * JNS rel8 - jump short if not sign (SF=0).  Tests SF and branches on the
 * inverted arm.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3457
3458
/**
 * @opcode 0x7a
 *
 * JP/JPE rel8 - jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3477
3478
/**
 * @opcode 0x7b
 *
 * JNP/JPO rel8 - jump short if parity odd (PF=0).  Tests PF and branches on
 * the inverted arm.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3497
3498
/**
 * @opcode 0x7c
 *
 * JL/JNGE rel8 - jump short if less (signed: SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3517
3518
/**
 * @opcode 0x7d
 *
 * JNL/JGE rel8 - jump short if not less (signed: SF == OF).  Tests SF != OF
 * and branches on the inverted arm.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3537
3538
/**
 * @opcode 0x7e
 *
 * JLE/JNG rel8 - jump short if less or equal (signed: ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3557
3558
/**
 * @opcode 0x7f
 *
 * JG/JNLE rel8 - jump short if greater (signed: ZF=0 and SF == OF).  Tests
 * "ZF=1 or SF != OF" and branches on the inverted arm.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3577
3578
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Handles the register target and the unlocked memory target completely.  The
 * macro deliberately ends inside an open "LOCK prefix present" else-branch;
 * the caller MUST follow it with either IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() or
 * IEMOP_BODY_BINARY_Eb_Ib_LOCKED() to close the braces.
 *
 * Note that for the memory target the immediate is fetched only after the
 * effective address has been calculated (ModRM displacement precedes the
 * immediate in the instruction stream).
 *
 * @param   a_fnNormalU8    The non-locked 8-bit worker (e.g. iemAImpl_add_u8).
 * @param   a_fRW           IEM_ACCESS_DATA_RW for read-modify-write ops, or
 *                          IEM_ACCESS_DATA_R for CMP (no write-back).
 */
#define IEMOP_BODY_BINARY_Eb_Ib(a_fnNormalU8, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3628
/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib for instructions that do not permit a LOCK
 * prefix (CMP): raises \#UD when LOCK was used with a memory target.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3635
/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib with the LOCK-prefixed memory path: same
 * decode sequence as the unlocked memory path but always maps the operand
 * read/write and calls the atomic (locked) worker.
 *
 * @param   a_fnLockedU8    The locked 8-bit worker (e.g. iemAImpl_add_u8_locked).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3658
3659
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 *
 * ADD Eb,Ib.  The two body macros pair up: the first emits the register and
 * unlocked-memory paths, the second closes it with the locked-memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
3670
3671
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 *
 * OR Eb,Ib.  Body macro pair: register + unlocked memory, then locked memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
3682
3683
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 *
 * ADC Eb,Ib.  Body macro pair: register + unlocked memory, then locked memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
3694
3695
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 *
 * SBB Eb,Ib.  Body macro pair: register + unlocked memory, then locked memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
3706
3707
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 *
 * AND Eb,Ib.  Body macro pair: register + unlocked memory, then locked memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
3718
3719
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 *
 * SUB Eb,Ib.  Body macro pair: register + unlocked memory, then locked memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
3730
3731
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 *
 * XOR Eb,Ib.  Body macro pair: register + unlocked memory, then locked memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
3742
3743
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 *
 * CMP Eb,Ib.  Read-only destination (IEM_ACCESS_DATA_R) and no LOCK variant:
 * the NO_LOCK closer raises \#UD when a LOCK prefix is present.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
3754
3755
/**
 * @opcode 0x80
 *
 * Group 1 byte-immediate dispatcher: the ModRM reg field selects the actual
 * operation (add/or/adc/sbb/and/sub/xor/cmp), each handled by its own worker.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3775
3776
/**
 * Body for a group 1 binary operator with a z-sized immediate (Ev,Iz).
 *
 * Emits the register target (16/32/64-bit) and the unlocked memory target.
 * Like the byte variant, the macro ends inside an open "LOCK prefix present"
 * else-branch which the caller must close with either
 * IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() or IEMOP_BODY_BINARY_Ev_Iz_LOCKED().
 *
 * The 64-bit immediate is a sign-extended 32-bit value (Iz, not Iq).  In the
 * 32-bit register case the high half of the 64-bit GPR is explicitly cleared
 * for read-modify-write operations only (CMP passes IEM_ACCESS_DATA_R and
 * must not touch the register).
 *
 * NOTE(review): the 64-bit register case calls
 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() while the 16/32-bit register cases
 * call plain IEMOP_HLP_DONE_DECODING(); likewise the 64-bit memory case does
 * DONE_DECODING before IEM_MC_ASSIGN whereas 16/32-bit assign first.  Both
 * asymmetries look unintentional - confirm against upstream history.
 *
 * @param   a_fnNormalU16   Non-locked 16-bit worker.
 * @param   a_fnNormalU32   Non-locked 32-bit worker.
 * @param   a_fnNormalU64   Non-locked 64-bit worker.
 * @param   a_fRW           IEM_ACCESS_DATA_RW or IEM_ACCESS_DATA_R (CMP).
 */
#define IEMOP_BODY_BINARY_Ev_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                if (a_fRW == IEM_ACCESS_DATA_RW) \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
3927
/**
 * Closes IEMOP_BODY_BINARY_Ev_Iz for instructions that do not permit a LOCK
 * prefix (CMP): raises \#UD when LOCK was used with a memory target.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3934
/**
 * Closes IEMOP_BODY_BINARY_Ev_Iz with the LOCK-prefixed memory paths: same
 * decoding as the unlocked memory cases, but the operand is always mapped
 * read/write and the atomic (locked) workers are invoked.
 *
 * @param   a_fnLockedU16   Locked 16-bit worker.
 * @param   a_fnLockedU32   Locked 32-bit worker.
 * @param   a_fnLockedU64   Locked 64-bit worker.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4012
4013
/**
 * @opmaps grp1_81
 * @opcode /0
 *
 * ADD Ev,Iz.  Body macro pair: register + unlocked memory, then locked memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4024
4025
/**
 * @opmaps grp1_81
 * @opcode /1
 *
 * OR Ev,Iz.  Body macro pair: register + unlocked memory, then locked memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4036
4037
/**
 * @opmaps grp1_81
 * @opcode /2
 *
 * ADC Ev,Iz.  Body macro pair: register + unlocked memory, then locked memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4048
4049
/**
 * @opmaps grp1_81
 * @opcode /3
 *
 * SBB Ev,Iz.  Body macro pair: register + unlocked memory, then locked memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4060
4061
4062/**
4063 * @opmaps grp1_81
4064 * @opcode /4
4065 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    /* AND Ev,Iz - bitwise AND of an Iz immediate into a 16/32/64-bit register
       or memory destination; LOCK is honored for memory targets. */
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4072
4073
4074/**
4075 * @opmaps grp1_81
4076 * @opcode /5
4077 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    /* SUB Ev,Iz - subtract an Iz immediate from a 16/32/64-bit register or
       memory destination; LOCK is honored for memory targets. */
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4084
4085
4086/**
4087 * @opmaps grp1_81
4088 * @opcode /6
4089 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    /* XOR Ev,Iz - bitwise XOR of an Iz immediate into a 16/32/64-bit register
       or memory destination; LOCK is honored for memory targets. */
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4096
4097
4098/**
4099 * @opmaps grp1_81
4100 * @opcode /7
4101 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    /* CMP Ev,Iz - compare (subtract and discard, updating EFLAGS only), hence
       read-only access and no LOCK prefix support (raises #UD). */
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK();
}
4108
4109
4110/**
4111 * @opcode 0x81
4112 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /* Group 1 dispatcher for opcode 0x81: the ModR/M reg field selects which
       of the eight arithmetic/logical Ev,Iz workers handles the instruction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4129
4130
4131/**
4132 * @opcode 0x82
4133 * @opmnemonic grp1_82
4134 * @opgroup og_groups
4135 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* Opcode 0x82 is an alias of 0x80 (Grp1 Eb,Ib) that is only valid outside
       64-bit mode; in 64-bit mode it raises #UD, otherwise forward to 0x80. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4141
4142
4143/**
4144 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4145 * iemOp_Grp1_Ev_Ib.
4146 */
/* Emits decode + execute for the register-target and non-locked memory-target
   forms.  Deliberately ends inside an open else-branch (LOCK prefix on a
   memory operand) that MUST be closed by a following
   IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() or IEMOP_BODY_BINARY_Ev_Ib_LOCKED(). */
#define IEMOP_BODY_BINARY_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        /* The Ib immediate is sign-extended to the effective operand size \
           via the (int8_t) casts below. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* Read-only ops (CMP, a_fRW == IEM_ACCESS_DATA_R) must not \
                   modify the destination, so only writers clear the upper \
                   half of the 64-bit register. */ \
                if ((a_fRW) != IEM_ACCESS_DATA_R) \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    /* Effective address first (1 = length of the Ib byte \
                       still to be fetched), then the immediate. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK-prefixed memory target: body supplied by the _NO_LOCK or \
               _LOCKED tail macro that must follow this one. */ \
            (void)0
4296
/* Tail for IEMOP_BODY_BINARY_Ev_Ib when the instruction does not allow a LOCK
   prefix (e.g. CMP): raises #UD and closes the braces the body macro left
   open. */
#define IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4303
/* Tail for IEMOP_BODY_BINARY_Ev_Ib handling the LOCK-prefixed memory form:
   maps the destination read-write and invokes the atomic (locked) worker.
   Closes the braces the body macro left open. */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    /* Ib is sign-extended to the operand size. */ \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4381
4382/**
4383 * @opmaps grp1_83
4384 * @opcode /0
4385 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    /* ADD Ev,Ib - add a sign-extended byte immediate to a 16/32/64-bit
       register or memory destination; LOCK is honored for memory targets. */
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4392
4393
4394/**
4395 * @opmaps grp1_83
4396 * @opcode /1
4397 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    /* OR Ev,Ib - bitwise OR of a sign-extended byte immediate into a
       16/32/64-bit register or memory destination. */
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4404
4405
4406/**
4407 * @opmaps grp1_83
4408 * @opcode /2
4409 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    /* ADC Ev,Ib - add-with-carry of a sign-extended byte immediate into a
       16/32/64-bit register or memory destination. */
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4416
4417
4418/**
4419 * @opmaps grp1_83
4420 * @opcode /3
4421 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    /* SBB Ev,Ib - subtract-with-borrow of a sign-extended byte immediate from
       a 16/32/64-bit register or memory destination. */
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4428
4429
4430/**
4431 * @opmaps grp1_83
4432 * @opcode /4
4433 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    /* AND Ev,Ib - bitwise AND of a sign-extended byte immediate into a
       16/32/64-bit register or memory destination. */
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4440
4441
4442/**
4443 * @opmaps grp1_83
4444 * @opcode /5
4445 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    /* SUB Ev,Ib - subtract a sign-extended byte immediate from a 16/32/64-bit
       register or memory destination. */
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4452
4453
4454/**
4455 * @opmaps grp1_83
4456 * @opcode /6
4457 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    /* XOR Ev,Ib - bitwise XOR of a sign-extended byte immediate into a
       16/32/64-bit register or memory destination. */
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4464
4465
4466/**
4467 * @opmaps grp1_83
4468 * @opcode /7
4469 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    /* CMP Ev,Ib - compare against a sign-extended byte immediate; EFLAGS only,
       read-only access, and no LOCK prefix support (raises #UD). */
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK();
}
4476
4477
4478/**
4479 * @opcode 0x83
4480 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Group 1 dispatcher for opcode 0x83: the ModR/M reg field selects which
       of the eight arithmetic/logical Ev,Ib workers handles the instruction. */
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4500
4501
4502/**
4503 * @opcode 0x84
4504 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    /* TEST Eb,Gb - AND without storing the result: only EFLAGS are updated
       (AF is architecturally undefined), the destination is read-only, and
       the LOCK prefix is invalid. */
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_test_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
4512
4513
4514/**
4515 * @opcode 0x85
4516 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    /* TEST Ev,Gv - AND without storing the result: only EFLAGS are updated
       (AF is architecturally undefined), the destination is read-only, and
       the LOCK prefix is invalid. */
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
4524
4525
4526/**
4527 * @opcode 0x86
4528 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    /* XCHG Eb,Gb - swap a byte register with another byte register or with a
       byte in memory. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register/register: swap via two temporaries. */
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG with a memory operand is implicitly locked on x86, so the
           atomic worker is the default; the unlocked one is used only when
           the execution mode says to disregard locking. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4577
4578
4579/**
4580 * @opcode 0x87
4581 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    /* XCHG Ev,Gv - swap a 16/32/64-bit general register with another register
       or with memory. */
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register/register: swap via two temporaries for each operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.  XCHG with memory is implicitly locked, so
         * the atomic worker is the default; the unlocked one is used only
         * when the execution mode says to disregard locking.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The worker wrote through the register reference, so the
                   upper half of the 64-bit register is zeroed explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4715
4716
4717/**
4718 * @opcode 0x88
4719 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    /* MOV Eb,Gb - store a byte register into another byte register or into
       memory. */
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4756
4757
4758/**
4759 * @opcode 0x89
4760 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    /* MOV Ev,Gv - store a 16/32/64-bit general register into another register
       or into memory, sized by the effective operand size. */
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4854
4855
4856/**
4857 * @opcode 0x8a
4858 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    /* MOV Gb,Eb - load a byte register from another byte register or from
       memory. */
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4894
4895
4896/**
4897 * @opcode 0x8b
4898 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    /* MOV Gv,Ev - load a 16/32/64-bit general register from another register
       or from memory, sized by the effective operand size. */
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4992
4993
4994/**
4995 * opcode 0x63
4996 * @todo Table fixme
4997 */
4998FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4999{
5000 if (!IEM_IS_64BIT_CODE(pVCpu))
5001 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5002 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5003 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5004 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5005}
5006
5007
5008/**
5009 * @opcode 0x8c
5010 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    /* MOV Ev,Sw - store a segment register into a general register or into
       memory. */
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Zero-extend the 16-bit selector to the register width. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5086
5087
5088
5089
5090/**
5091 * @opcode 0x8d
5092 */
5093FNIEMOP_DEF(iemOp_lea_Gv_M)
5094{
5095 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5097 if (IEM_IS_MODRM_REG_MODE(bRm))
5098 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5099
5100 switch (pVCpu->iem.s.enmEffOpSize)
5101 {
5102 case IEMMODE_16BIT:
5103 IEM_MC_BEGIN(0, 2);
5104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5105 IEM_MC_LOCAL(uint16_t, u16Cast);
5106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5108 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5109 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5110 IEM_MC_ADVANCE_RIP_AND_FINISH();
5111 IEM_MC_END();
5112 break;
5113
5114 case IEMMODE_32BIT:
5115 IEM_MC_BEGIN(0, 2);
5116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5117 IEM_MC_LOCAL(uint32_t, u32Cast);
5118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5120 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5121 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5122 IEM_MC_ADVANCE_RIP_AND_FINISH();
5123 IEM_MC_END();
5124 break;
5125
5126 case IEMMODE_64BIT:
5127 IEM_MC_BEGIN(0, 1);
5128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5131 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5132 IEM_MC_ADVANCE_RIP_AND_FINISH();
5133 IEM_MC_END();
5134 break;
5135
5136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5137 }
5138}
5139
5140
5141/**
5142 * @opcode 0x8e
5143 */
5144FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5145{
5146 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5147
5148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5149
5150 /*
5151 * The practical operand size is 16-bit.
5152 */
5153#if 0 /* not necessary */
5154 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5155#endif
5156
5157 /*
5158 * Check that the destination register exists and can be used with this
5159 * instruction. The REX.R prefix is ignored.
5160 */
5161 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5162 /** @todo r=bird: What does 8086 do here wrt CS? */
5163 if ( iSegReg == X86_SREG_CS
5164 || iSegReg > X86_SREG_GS)
5165 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5166
5167 /*
5168 * If rm is denoting a register, no more instruction bytes.
5169 */
5170 if (IEM_IS_MODRM_REG_MODE(bRm))
5171 {
5172 IEM_MC_BEGIN(2, 0);
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5175 IEM_MC_ARG(uint16_t, u16Value, 1);
5176 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5177 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5178 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5179 else
5180 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5181 IEM_MC_END();
5182 }
5183 else
5184 {
5185 /*
5186 * We're loading the register from memory. The access is word sized
5187 * regardless of operand size prefixes.
5188 */
5189 IEM_MC_BEGIN(2, 1);
5190 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5191 IEM_MC_ARG(uint16_t, u16Value, 1);
5192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5195 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5196 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5197 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5198 else
5199 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5200 IEM_MC_END();
5201 }
5202}
5203
5204
/** Opcode 0x8f /0.
 *
 * pop Ev - pop a word/dword/qword off the stack into a general purpose
 * register or memory.  Reached from iemOp_Grp1A__xop when modrm.reg == 0.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* The (cb << 8) argument tells the EA calculation how much rSP
               is popped, per the Intel note above. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }
        /* NOTE(review): no break here - presumably IEM_MC_CALL_CIMPL_2 ends
           the function (returns), making fallthrough unreachable; confirm
           against the macro definition. */

        case IEMMODE_32BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_64BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Only commit rSP after both the pop and the memory store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5342
5343
5344/**
5345 * @opcode 0x8f
5346 */
5347FNIEMOP_DEF(iemOp_Grp1A__xop)
5348{
5349 /*
5350 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5351 * three byte VEX prefix, except that the mmmmm field cannot have the values
5352 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5353 */
5354 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5355 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5356 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5357
5358 IEMOP_MNEMONIC(xop, "xop");
5359 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5360 {
5361 /** @todo Test when exctly the XOP conformance checks kick in during
5362 * instruction decoding and fetching (using \#PF). */
5363 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5364 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5365 if ( ( pVCpu->iem.s.fPrefixes
5366 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5367 == 0)
5368 {
5369 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5370 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
5371 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5372 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5373 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5374 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
5375 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
5376 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
5377 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
5378
5379 /** @todo XOP: Just use new tables and decoders. */
5380 switch (bRm & 0x1f)
5381 {
5382 case 8: /* xop opcode map 8. */
5383 IEMOP_BITCH_ABOUT_STUB();
5384 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5385
5386 case 9: /* xop opcode map 9. */
5387 IEMOP_BITCH_ABOUT_STUB();
5388 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5389
5390 case 10: /* xop opcode map 10. */
5391 IEMOP_BITCH_ABOUT_STUB();
5392 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5393
5394 default:
5395 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5396 IEMOP_RAISE_INVALID_OPCODE_RET();
5397 }
5398 }
5399 else
5400 Log(("XOP: Invalid prefix mix!\n"));
5401 }
5402 else
5403 Log(("XOP: XOP support disabled!\n"));
5404 IEMOP_RAISE_INVALID_OPCODE_RET();
5405}
5406
5407
5408/**
5409 * Common 'xchg reg,rAX' helper.
5410 */
5411FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
5412{
5413 iReg |= pVCpu->iem.s.uRexB;
5414 switch (pVCpu->iem.s.enmEffOpSize)
5415 {
5416 case IEMMODE_16BIT:
5417 IEM_MC_BEGIN(0, 2);
5418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5419 IEM_MC_LOCAL(uint16_t, u16Tmp1);
5420 IEM_MC_LOCAL(uint16_t, u16Tmp2);
5421 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
5422 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
5423 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
5424 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
5425 IEM_MC_ADVANCE_RIP_AND_FINISH();
5426 IEM_MC_END();
5427 break;
5428
5429 case IEMMODE_32BIT:
5430 IEM_MC_BEGIN(0, 2);
5431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5432 IEM_MC_LOCAL(uint32_t, u32Tmp1);
5433 IEM_MC_LOCAL(uint32_t, u32Tmp2);
5434 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
5435 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
5436 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
5437 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
5438 IEM_MC_ADVANCE_RIP_AND_FINISH();
5439 IEM_MC_END();
5440 break;
5441
5442 case IEMMODE_64BIT:
5443 IEM_MC_BEGIN(0, 2);
5444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5445 IEM_MC_LOCAL(uint64_t, u64Tmp1);
5446 IEM_MC_LOCAL(uint64_t, u64Tmp2);
5447 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
5448 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
5449 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
5450 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
5451 IEM_MC_ADVANCE_RIP_AND_FINISH();
5452 IEM_MC_END();
5453 break;
5454
5455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5456 }
5457}
5458
5459
5460/**
5461 * @opcode 0x90
5462 */
5463FNIEMOP_DEF(iemOp_nop)
5464{
5465 /* R8/R8D and RAX/EAX can be exchanged. */
5466 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
5467 {
5468 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
5469 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
5470 }
5471
5472 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
5473 {
5474 IEMOP_MNEMONIC(pause, "pause");
5475 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
5476 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
5477 if (!IEM_IS_IN_GUEST(pVCpu))
5478 { /* probable */ }
5479#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5480 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
5481 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
5482#endif
5483#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
5484 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
5485 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
5486#endif
5487 }
5488 else
5489 IEMOP_MNEMONIC(nop, "nop");
5490 /** @todo testcase: lock nop; lock pause */
5491 IEM_MC_BEGIN(0, 0);
5492 IEMOP_HLP_DONE_DECODING();
5493 IEM_MC_ADVANCE_RIP_AND_FINISH();
5494 IEM_MC_END();
5495}
5496
5497
5498/**
5499 * @opcode 0x91
5500 */
5501FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
5502{
5503 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
5504 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
5505}
5506
5507
5508/**
5509 * @opcode 0x92
5510 */
5511FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
5512{
5513 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
5514 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
5515}
5516
5517
5518/**
5519 * @opcode 0x93
5520 */
5521FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
5522{
5523 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
5524 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
5525}
5526
5527
5528/**
5529 * @opcode 0x94
5530 */
5531FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
5532{
5533 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
5534 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
5535}
5536
5537
5538/**
5539 * @opcode 0x95
5540 */
5541FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
5542{
5543 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
5544 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
5545}
5546
5547
5548/**
5549 * @opcode 0x96
5550 */
5551FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
5552{
5553 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
5554 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
5555}
5556
5557
5558/**
5559 * @opcode 0x97
5560 */
5561FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
5562{
5563 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
5564 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
5565}
5566
5567
5568/**
5569 * @opcode 0x98
5570 */
5571FNIEMOP_DEF(iemOp_cbw)
5572{
5573 switch (pVCpu->iem.s.enmEffOpSize)
5574 {
5575 case IEMMODE_16BIT:
5576 IEMOP_MNEMONIC(cbw, "cbw");
5577 IEM_MC_BEGIN(0, 1);
5578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5579 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
5580 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
5581 } IEM_MC_ELSE() {
5582 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
5583 } IEM_MC_ENDIF();
5584 IEM_MC_ADVANCE_RIP_AND_FINISH();
5585 IEM_MC_END();
5586 break;
5587
5588 case IEMMODE_32BIT:
5589 IEMOP_MNEMONIC(cwde, "cwde");
5590 IEM_MC_BEGIN(0, 1);
5591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5592 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5593 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
5594 } IEM_MC_ELSE() {
5595 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
5596 } IEM_MC_ENDIF();
5597 IEM_MC_ADVANCE_RIP_AND_FINISH();
5598 IEM_MC_END();
5599 break;
5600
5601 case IEMMODE_64BIT:
5602 IEMOP_MNEMONIC(cdqe, "cdqe");
5603 IEM_MC_BEGIN(0, 1);
5604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5605 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5606 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
5607 } IEM_MC_ELSE() {
5608 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
5609 } IEM_MC_ENDIF();
5610 IEM_MC_ADVANCE_RIP_AND_FINISH();
5611 IEM_MC_END();
5612 break;
5613
5614 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5615 }
5616}
5617
5618
5619/**
5620 * @opcode 0x99
5621 */
5622FNIEMOP_DEF(iemOp_cwd)
5623{
5624 switch (pVCpu->iem.s.enmEffOpSize)
5625 {
5626 case IEMMODE_16BIT:
5627 IEMOP_MNEMONIC(cwd, "cwd");
5628 IEM_MC_BEGIN(0, 1);
5629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5630 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5631 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
5632 } IEM_MC_ELSE() {
5633 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
5634 } IEM_MC_ENDIF();
5635 IEM_MC_ADVANCE_RIP_AND_FINISH();
5636 IEM_MC_END();
5637 break;
5638
5639 case IEMMODE_32BIT:
5640 IEMOP_MNEMONIC(cdq, "cdq");
5641 IEM_MC_BEGIN(0, 1);
5642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5643 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5644 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
5645 } IEM_MC_ELSE() {
5646 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
5647 } IEM_MC_ENDIF();
5648 IEM_MC_ADVANCE_RIP_AND_FINISH();
5649 IEM_MC_END();
5650 break;
5651
5652 case IEMMODE_64BIT:
5653 IEMOP_MNEMONIC(cqo, "cqo");
5654 IEM_MC_BEGIN(0, 1);
5655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5656 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
5657 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
5658 } IEM_MC_ELSE() {
5659 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
5660 } IEM_MC_ENDIF();
5661 IEM_MC_ADVANCE_RIP_AND_FINISH();
5662 IEM_MC_END();
5663 break;
5664
5665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5666 }
5667}
5668
5669
5670/**
5671 * @opcode 0x9a
5672 */
5673FNIEMOP_DEF(iemOp_call_Ap)
5674{
5675 IEMOP_MNEMONIC(call_Ap, "call Ap");
5676 IEMOP_HLP_NO_64BIT();
5677
5678 /* Decode the far pointer address and pass it on to the far call C implementation. */
5679 uint32_t off32Seg;
5680 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
5681 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
5682 else
5683 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
5684 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
5685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5686 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
5687 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
5688 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
5689}
5690
5691
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending FPU exceptions / device-not-available conditions and
 * otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5703
5704
5705/**
5706 * @opcode 0x9c
5707 */
5708FNIEMOP_DEF(iemOp_pushf_Fv)
5709{
5710 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
5711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5712 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5713 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
5714}
5715
5716
5717/**
5718 * @opcode 0x9d
5719 */
5720FNIEMOP_DEF(iemOp_popf_Fv)
5721{
5722 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
5723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5724 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5725 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
5726}
5727
5728
5729/**
5730 * @opcode 0x9e
5731 */
5732FNIEMOP_DEF(iemOp_sahf)
5733{
5734 IEMOP_MNEMONIC(sahf, "sahf");
5735 if ( IEM_IS_64BIT_CODE(pVCpu)
5736 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5737 IEMOP_RAISE_INVALID_OPCODE_RET();
5738 IEM_MC_BEGIN(0, 2);
5739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5740 IEM_MC_LOCAL(uint32_t, u32Flags);
5741 IEM_MC_LOCAL(uint32_t, EFlags);
5742 IEM_MC_FETCH_EFLAGS(EFlags);
5743 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
5744 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5745 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
5746 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
5747 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
5748 IEM_MC_COMMIT_EFLAGS(EFlags);
5749 IEM_MC_ADVANCE_RIP_AND_FINISH();
5750 IEM_MC_END();
5751}
5752
5753
5754/**
5755 * @opcode 0x9f
5756 */
5757FNIEMOP_DEF(iemOp_lahf)
5758{
5759 IEMOP_MNEMONIC(lahf, "lahf");
5760 if ( IEM_IS_64BIT_CODE(pVCpu)
5761 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5762 IEMOP_RAISE_INVALID_OPCODE_RET();
5763 IEM_MC_BEGIN(0, 1);
5764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5765 IEM_MC_LOCAL(uint8_t, u8Flags);
5766 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
5767 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
5768 IEM_MC_ADVANCE_RIP_AND_FINISH();
5769 IEM_MC_END();
5770}
5771
5772
5773/**
5774 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
5775 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
5776 * Will return/throw on failures.
5777 * @param a_GCPtrMemOff The variable to store the offset in.
5778 */
5779#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
5780 do \
5781 { \
5782 switch (pVCpu->iem.s.enmEffAddrMode) \
5783 { \
5784 case IEMMODE_16BIT: \
5785 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
5786 break; \
5787 case IEMMODE_32BIT: \
5788 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
5789 break; \
5790 case IEMMODE_64BIT: \
5791 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
5792 break; \
5793 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5794 } \
5795 } while (0)
5796
5797/**
5798 * @opcode 0xa0
5799 */
5800FNIEMOP_DEF(iemOp_mov_AL_Ob)
5801{
5802 /*
5803 * Get the offset.
5804 */
5805 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
5806 RTGCPTR GCPtrMemOff;
5807 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5808
5809 /*
5810 * Fetch AL.
5811 */
5812 IEM_MC_BEGIN(0,1);
5813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5814 IEM_MC_LOCAL(uint8_t, u8Tmp);
5815 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5816 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
5817 IEM_MC_ADVANCE_RIP_AND_FINISH();
5818 IEM_MC_END();
5819}
5820
5821
5822/**
5823 * @opcode 0xa1
5824 */
5825FNIEMOP_DEF(iemOp_mov_rAX_Ov)
5826{
5827 /*
5828 * Get the offset.
5829 */
5830 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
5831 RTGCPTR GCPtrMemOff;
5832 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5833
5834 /*
5835 * Fetch rAX.
5836 */
5837 switch (pVCpu->iem.s.enmEffOpSize)
5838 {
5839 case IEMMODE_16BIT:
5840 IEM_MC_BEGIN(0,1);
5841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5842 IEM_MC_LOCAL(uint16_t, u16Tmp);
5843 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5844 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
5845 IEM_MC_ADVANCE_RIP_AND_FINISH();
5846 IEM_MC_END();
5847 break;
5848
5849 case IEMMODE_32BIT:
5850 IEM_MC_BEGIN(0,1);
5851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5852 IEM_MC_LOCAL(uint32_t, u32Tmp);
5853 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5854 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
5855 IEM_MC_ADVANCE_RIP_AND_FINISH();
5856 IEM_MC_END();
5857 break;
5858
5859 case IEMMODE_64BIT:
5860 IEM_MC_BEGIN(0,1);
5861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5862 IEM_MC_LOCAL(uint64_t, u64Tmp);
5863 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5864 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
5865 IEM_MC_ADVANCE_RIP_AND_FINISH();
5866 IEM_MC_END();
5867 break;
5868
5869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5870 }
5871}
5872
5873
5874/**
5875 * @opcode 0xa2
5876 */
5877FNIEMOP_DEF(iemOp_mov_Ob_AL)
5878{
5879 /*
5880 * Get the offset.
5881 */
5882 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
5883 RTGCPTR GCPtrMemOff;
5884 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5885
5886 /*
5887 * Store AL.
5888 */
5889 IEM_MC_BEGIN(0,1);
5890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5891 IEM_MC_LOCAL(uint8_t, u8Tmp);
5892 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
5893 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
5894 IEM_MC_ADVANCE_RIP_AND_FINISH();
5895 IEM_MC_END();
5896}
5897
5898
5899/**
5900 * @opcode 0xa3
5901 */
5902FNIEMOP_DEF(iemOp_mov_Ov_rAX)
5903{
5904 /*
5905 * Get the offset.
5906 */
5907 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
5908 RTGCPTR GCPtrMemOff;
5909 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5910
5911 /*
5912 * Store rAX.
5913 */
5914 switch (pVCpu->iem.s.enmEffOpSize)
5915 {
5916 case IEMMODE_16BIT:
5917 IEM_MC_BEGIN(0,1);
5918 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5919 IEM_MC_LOCAL(uint16_t, u16Tmp);
5920 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
5921 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
5922 IEM_MC_ADVANCE_RIP_AND_FINISH();
5923 IEM_MC_END();
5924 break;
5925
5926 case IEMMODE_32BIT:
5927 IEM_MC_BEGIN(0,1);
5928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5929 IEM_MC_LOCAL(uint32_t, u32Tmp);
5930 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
5931 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
5932 IEM_MC_ADVANCE_RIP_AND_FINISH();
5933 IEM_MC_END();
5934 break;
5935
5936 case IEMMODE_64BIT:
5937 IEM_MC_BEGIN(0,1);
5938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5939 IEM_MC_LOCAL(uint64_t, u64Tmp);
5940 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
5941 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
5942 IEM_MC_ADVANCE_RIP_AND_FINISH();
5943 IEM_MC_END();
5944 break;
5945
5946 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5947 }
5948}
5949
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep movs step: loads ValBits bits from [iEffSeg:rSI], stores
 * them to [ES:rDI], then advances (or, if EFLAGS.DF is set, retreats) both
 * rSI and rDI by the operand size.  AddrBits selects which width of rSI/rDI
 * is used (zero-extended to 64-bit for the access).
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
5969
5970/**
5971 * @opcode 0xa4
5972 */
5973FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
5974{
5975 /*
5976 * Use the C implementation if a repeat prefix is encountered.
5977 */
5978 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5979 {
5980 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
5981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5982 switch (pVCpu->iem.s.enmEffAddrMode)
5983 {
5984 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
5985 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
5986 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
5987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5988 }
5989 }
5990
5991 /*
5992 * Sharing case implementation with movs[wdq] below.
5993 */
5994 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
5995 switch (pVCpu->iem.s.enmEffAddrMode)
5996 {
5997 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
5998 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
5999 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
6000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6001 }
6002}
6003
6004
6005/**
6006 * @opcode 0xa5
6007 */
6008FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6009{
6010
6011 /*
6012 * Use the C implementation if a repeat prefix is encountered.
6013 */
6014 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6015 {
6016 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6018 switch (pVCpu->iem.s.enmEffOpSize)
6019 {
6020 case IEMMODE_16BIT:
6021 switch (pVCpu->iem.s.enmEffAddrMode)
6022 {
6023 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6024 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6025 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6027 }
6028 break;
6029 case IEMMODE_32BIT:
6030 switch (pVCpu->iem.s.enmEffAddrMode)
6031 {
6032 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6033 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6034 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6035 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6036 }
6037 case IEMMODE_64BIT:
6038 switch (pVCpu->iem.s.enmEffAddrMode)
6039 {
6040 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6041 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6042 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6043 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6044 }
6045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6046 }
6047 }
6048
6049 /*
6050 * Annoying double switch here.
6051 * Using ugly macro for implementing the cases, sharing it with movsb.
6052 */
6053 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6054 switch (pVCpu->iem.s.enmEffOpSize)
6055 {
6056 case IEMMODE_16BIT:
6057 switch (pVCpu->iem.s.enmEffAddrMode)
6058 {
6059 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
6060 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
6061 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
6062 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6063 }
6064 break;
6065
6066 case IEMMODE_32BIT:
6067 switch (pVCpu->iem.s.enmEffAddrMode)
6068 {
6069 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
6070 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
6071 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
6072 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6073 }
6074 break;
6075
6076 case IEMMODE_64BIT:
6077 switch (pVCpu->iem.s.enmEffAddrMode)
6078 {
6079 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6080 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
6081 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
6082 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6083 }
6084 break;
6085 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6086 }
6087}
6088
6089#undef IEM_MOVS_CASE
6090
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-rep cmps step: loads ValBits bits from [iEffSeg:rSI] and
 * [ES:rDI], compares them via iemAImpl_cmp_u## (updating EFLAGS only), then
 * advances (or, if EFLAGS.DF is set, retreats) both rSI and rDI by the
 * operand size.  AddrBits selects the rSI/rDI width used for the accesses.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6118
/**
 * @opcode 0xa6
 *
 * CMPSB - compare the byte at [iEffSeg:xSI] with the byte at [es:xDI],
 * updating only EFLAGS, then step xSI/xDI per EFLAGS.DF.  REPE/REPNE
 * variants are deferred to C implementations.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6165
6166
/**
 * @opcode 0xa7
 *
 * CMPSW/CMPSD/CMPSQ - compare the operand at [iEffSeg:xSI] with the one at
 * [es:xDI] (flags only) and step xSI/xDI per EFLAGS.DF.  Dispatches on
 * operand size x address size; REPE/REPNE forms go to C implementations.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns, fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns, fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6284
6285#undef IEM_CMPS_CASE
6286
/**
 * @opcode 0xa8
 *
 * TEST AL,Ib - AND AL with the immediate byte, updating only EFLAGS
 * (no result is written).  AF is architecturally undefined afterwards.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
6296
6297
/**
 * @opcode 0xa9
 *
 * TEST rAX,Iz - AND AX/EAX/RAX with the immediate (sign-extended for 64-bit
 * operand size), updating only EFLAGS.  AF is architecturally undefined.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
6307
6308
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-repeating STOS body for one operand-size/address-size
 * combination: stores AL/AX/EAX/RAX at [es:xDI], then steps xDI by the
 * operand size -- backwards when EFLAGS.DF is set, forwards otherwise.
 *
 * @param   ValBits     Operand width in bits (8/16/32/64).
 * @param   AddrBits    Effective address width in bits (16/32/64).
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6325
/**
 * @opcode 0xaa
 *
 * STOSB - store AL at [es:xDI] and step xDI per EFLAGS.DF.  Both REP
 * prefixes select the repeating C implementation (REPNE has no distinct
 * meaning for STOS).
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6359
6360
6361/**
6362 * @opcode 0xab
6363 */
6364FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6365{
6366 /*
6367 * Use the C implementation if a repeat prefix is encountered.
6368 */
6369 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6370 {
6371 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6373 switch (pVCpu->iem.s.enmEffOpSize)
6374 {
6375 case IEMMODE_16BIT:
6376 switch (pVCpu->iem.s.enmEffAddrMode)
6377 {
6378 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
6379 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
6380 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
6381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6382 }
6383 break;
6384 case IEMMODE_32BIT:
6385 switch (pVCpu->iem.s.enmEffAddrMode)
6386 {
6387 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
6388 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
6389 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
6390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6391 }
6392 case IEMMODE_64BIT:
6393 switch (pVCpu->iem.s.enmEffAddrMode)
6394 {
6395 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6396 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
6397 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
6398 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6399 }
6400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6401 }
6402 }
6403
6404 /*
6405 * Annoying double switch here.
6406 * Using ugly macro for implementing the cases, sharing it with stosb.
6407 */
6408 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
6409 switch (pVCpu->iem.s.enmEffOpSize)
6410 {
6411 case IEMMODE_16BIT:
6412 switch (pVCpu->iem.s.enmEffAddrMode)
6413 {
6414 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
6415 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
6416 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
6417 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6418 }
6419 break;
6420
6421 case IEMMODE_32BIT:
6422 switch (pVCpu->iem.s.enmEffAddrMode)
6423 {
6424 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
6425 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
6426 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
6427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6428 }
6429 break;
6430
6431 case IEMMODE_64BIT:
6432 switch (pVCpu->iem.s.enmEffAddrMode)
6433 {
6434 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6435 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
6436 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
6437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6438 }
6439 break;
6440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6441 }
6442}
6443
6444#undef IEM_STOS_CASE
6445
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-repeating LODS body for one operand-size/address-size
 * combination: loads the value at [iEffSeg:xSI] into AL/AX/EAX/RAX, then
 * steps xSI by the operand size -- backwards when EFLAGS.DF is set,
 * forwards otherwise.
 *
 * @param   ValBits     Operand width in bits (8/16/32/64).
 * @param   AddrBits    Effective address width in bits (16/32/64).
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6462
/**
 * @opcode 0xac
 *
 * LODSB - load AL from [iEffSeg:xSI] and step xSI per EFLAGS.DF.  Both REP
 * prefixes select the repeating C implementation.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below.  (The old comment
     * said "stos[wdq]" -- a copy-and-paste leftover.)
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6496
6497
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - load AX/EAX/RAX from [iEffSeg:xSI] and step xSI per
 * EFLAGS.DF.  Dispatches on operand size x address size; both REP prefixes
 * select the repeating C implementation.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns, fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6580
6581#undef IEM_LODS_CASE
6582
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeating SCAS body for one operand-size/address-size
 * combination: compares AL/AX/EAX/RAX against the value at [es:xDI] via
 * iemAImpl_cmp_uNN (flags only, rAX is not modified), then steps xDI by
 * the operand size -- backwards when EFLAGS.DF is set, forwards otherwise.
 *
 * @param   ValBits     Operand width in bits (8/16/32/64).
 * @param   AddrBits    Effective address width in bits (16/32/64).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
6605
6606/**
6607 * @opcode 0xae
6608 */
6609FNIEMOP_DEF(iemOp_scasb_AL_Xb)
6610{
6611 /*
6612 * Use the C implementation if a repeat prefix is encountered.
6613 */
6614 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6615 {
6616 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
6617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6618 switch (pVCpu->iem.s.enmEffAddrMode)
6619 {
6620 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
6621 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
6622 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
6623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6624 }
6625 }
6626 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6627 {
6628 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
6629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6630 switch (pVCpu->iem.s.enmEffAddrMode)
6631 {
6632 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
6633 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
6634 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
6635 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6636 }
6637 }
6638
6639 /*
6640 * Sharing case implementation with stos[wdq] below.
6641 */
6642 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
6643 switch (pVCpu->iem.s.enmEffAddrMode)
6644 {
6645 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
6646 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
6647 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
6648 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6649 }
6650}
6651
6652
/**
 * @opcode 0xaf
 *
 * SCASW/SCASD/SCASQ - compare AX/EAX/RAX with the operand at [es:xDI]
 * (flags only) and step xDI per EFLAGS.DF.  Dispatches on operand size x
 * address size; REPE/REPNE forms go to C implementations.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns, fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is the assert wrong?  64-bit mode allows 32-bit (0x67) but not 16-bit addressing, so this case should indeed be unreachable -- verify. */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every case above returns, fall-through is unreachable */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing cannot be encoded in 64-bit mode */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6769
6770#undef IEM_SCAS_CASE
6771
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and stores it into the given 8-bit register.
 *
 * @param   iFixedReg   General register index, already combined with any
 *                      REX.B extension by the caller.  For indexes 4-7 the
 *                      AH/CH/DH/BH vs SPL/BPL/SIL/DIL split is presumably
 *                      resolved inside IEM_MC_STORE_GREG_U8 based on REX
 *                      presence -- confirm against the MC implementation.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6785
6786
/**
 * @opcode 0xb0
 * MOV AL,Ib (register index 0; R8B when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
6795
6796
/**
 * @opcode 0xb1
 * MOV CL,Ib (register index 1; R9B when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
6805
6806
/**
 * @opcode 0xb2
 * MOV DL,Ib (register index 2; R10B when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
6815
6816
/**
 * @opcode 0xb3
 * MOV BL,Ib (register index 3; R11B when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
6825
6826
/**
 * @opcode 0xb4
 * MOV AH,Ib -- register index 4 (xSP | uRexB): AH without a REX prefix,
 * SPL/R12B with one; the split is presumably resolved in the U8 greg
 * accessor -- confirm there.
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
6835
6836
/**
 * @opcode 0xb5
 * MOV CH,Ib -- register index 5 (xBP | uRexB): CH without a REX prefix,
 * BPL/R13B with one.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
6845
6846
/**
 * @opcode 0xb6
 * MOV DH,Ib -- register index 6 (xSI | uRexB): DH without a REX prefix,
 * SIL/R14B with one.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
6855
6856
/**
 * @opcode 0xb7
 * MOV BH,Ib -- register index 7 (xDI | uRexB): BH without a REX prefix,
 * DIL/R15B with one.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
6865
6866
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate of the current effective operand size and stores it
 * into the given register.  Note that the 64-bit case consumes a full
 * 64-bit immediate (B8+r is the only instruction with an imm64).
 *
 * @param   iFixedReg   General register index, already combined with any
 *                      REX.B extension by the caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEM_MC_BEGIN(0, 1);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6911
6912
/**
 * @opcode 0xb8
 * MOV rAX,Iv (register index 0; R8 when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
6921
6922
/**
 * @opcode 0xb9
 * MOV rCX,Iv (register index 1; R9 when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
6931
6932
/**
 * @opcode 0xba
 * MOV rDX,Iv (register index 2; R10 when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
6941
6942
/**
 * @opcode 0xbb
 * MOV rBX,Iv (register index 3; R11 when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
6951
6952
/**
 * @opcode 0xbc
 * MOV rSP,Iv (register index 4; R12 when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
6961
6962
/**
 * @opcode 0xbd
 * MOV rBP,Iv (register index 5; R13 when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
6971
6972
/**
 * @opcode 0xbe
 * MOV rSI,Iv (register index 6; R14 when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
6981
6982
/**
 * @opcode 0xbf
 * MOV rDI,Iv (register index 7; R15 when REX.B is set, via uRexB).
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
6991
6992
/**
 * @opcode 0xc0
 *
 * Group 2 shift/rotate Eb,Ib - rol/ror/rcl/rcr/shl/shr/sar of an 8-bit
 * operand by an immediate count; the ModRM reg field selects the operation
 * (/6 raises \#UD).  Requires a 186 or later.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned -> #UD */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF); /* OF/AF undefined for some counts/ops */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to be fetched */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7053
7054
/**
 * @opcode 0xc1
 * Group 2, Ev,Ib: rotate/shift a 16/32/64-bit operand by an immediate count.
 * The ModRM reg field selects the operation; /6 is undefined and raises \#UD.
 * OF and AF are declared undefined for this group.  The 32-bit register form
 * clears the high half of the 64-bit GPR, as architecturally required.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186(); /* Imm8 shift counts were introduced with the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the worker table from the ModRM reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination: immediate follows ModRM directly. */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: effective address first, then the trailing imm8. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one immediate byte still follows. */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one immediate byte still follows. */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one immediate byte still follows. */
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7198
7199
/**
 * @opcode 0xc2
 * retn Iw: near return, additionally popping Iw bytes of arguments off
 * the stack.  Deferred to a C implementation per effective operand size.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Each IEM_MC_DEFER_TO_CIMPL_*_RET returns, so no break statements needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7220
7221
/**
 * @opcode 0xc3
 * retn: plain near return.  Deferred to a C implementation per effective
 * operand size.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Each IEM_MC_DEFER_TO_CIMPL_*_RET returns, so no break statements needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7241
7242
/**
 * @opcode 0xc4
 * les Gv,Mp in legacy/compatibility mode with a memory operand; otherwise
 * the 3-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode.  VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* bRm here is VEX byte 1: inverted R/X/B in bits 7:5, map select in bits 4:0. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            /* bVex2 is VEX byte 2: W, inverted vvvv, L and pp fields. */
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* Dispatch on the VEX.m-mmmm opcode map select field. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7312
7313
/**
 * @opcode 0xc5
 * lds Gv,Mp in legacy/compatibility mode with a memory operand; otherwise
 * the 2-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* bRm here is the single VEX payload byte: inverted R, inverted vvvv, L, pp. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* Two-byte VEX always implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
7357
7358
/**
 * @opcode 0xc6
 * Group 11, Eb,Ib: store an immediate byte to a register or memory.
 * Only the /0 encoding (mov) is defined; everything else raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib (/0) is defined in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory destination: effective address before the trailing imm8. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = one immediate byte still follows. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7392
7393
/**
 * @opcode 0xc7
 * Group 11, Ev,Iz: store an immediate to a 16/32/64-bit register or memory
 * operand.  Only the /0 encoding (mov) is defined; everything else raises
 * \#UD.  The 64-bit form sign-extends a 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz (/0) is defined in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit form: the imm32 is sign-extended, not a full imm64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: effective address before the trailing immediate. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = imm16 still follows. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = imm32 still follows. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = sign-extended imm32 still follows. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7481
7482
7483
7484
/**
 * @opcode 0xc8
 * enter Iw,Ib: create a stack frame of Iw bytes with nesting level Ib.
 * Deferred entirely to the C implementation.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186(); /* ENTER was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
7498
7499
/**
 * @opcode 0xc9
 * leave: tear down the current stack frame (rSP := rBP; pop rBP).
 * Deferred entirely to the C implementation.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186(); /* LEAVE was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
7511
7512
/**
 * @opcode 0xca
 * retf Iw: far return, additionally popping Iw bytes of arguments.
 * A far return may change CPU mode, hence the extra CIMPL flags.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
7524
7525
/**
 * @opcode 0xcb
 * retf: plain far return.  Shares the C implementation with 0xca,
 * passing zero bytes to pop.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
7536
7537
/**
 * @opcode 0xcc
 * int3: breakpoint trap, raising \#BP via the shared software-interrupt
 * C implementation.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
7549
7550
/**
 * @opcode 0xcd
 * int Ib: software interrupt to vector Ib via the shared
 * software-interrupt C implementation.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, u8Int, IEMINT_INTN);
}
7563
7564
/**
 * @opcode 0xce
 * into: raise \#OF if the overflow flag is set (hence the conditional
 * branch flag).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT(); /* INTO is not encodable in 64-bit mode. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
}
7576
7577
/**
 * @opcode 0xcf
 * iret: interrupt return.  May change mode and rewrites RFLAGS, hence the
 * CIMPL flags; fully handled by the C implementation.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
7589
7590
/**
 * @opcode 0xd0
 * Group 2, Eb,1: rotate/shift a byte operand by a constant count of one.
 * The ModRM reg field selects the operation; /6 is undefined and raises
 * \#UD.  OF and AF are declared undefined for this group.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the worker table from the ModRM reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination; the shift count is the constant 1. */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,           0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,          2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory destination: map read-write, apply the worker, commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,           0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,  2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7647
7648
7649
/**
 * @opcode 0xd1
 * Group 2, Ev,1: rotate/shift a 16/32/64-bit operand by a constant count
 * of one.  The ModRM reg field selects the operation; /6 is undefined and
 * raises \#UD.  OF and AF are declared undefined for this group.  The
 * 32-bit register form clears the high half of the 64-bit GPR.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the worker table from the ModRM reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination; the shift count is the constant 1. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map read-write, apply the worker, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7785
7786
/**
 * @opcode 0xd2
 * Group 2, Eb,CL: rotate/shift a byte operand by the count in CL.
 * The ModRM reg field selects the operation; /6 is undefined and raises
 * \#UD.  OF and AF are declared undefined for this group.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the worker table from the ModRM reg field. */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination; shift count fetched from CL at runtime. */
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,    0);
        IEM_MC_ARG(uint8_t,     cShiftArg, 1);
        IEM_MC_ARG(uint32_t *,  pEFlags,   2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* byte fetch of xCX == CL. */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory destination: map read-write, apply the worker, commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); /* 0 = no immediate bytes follow. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* byte fetch of xCX == CL. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7845
7846
/**
 * @opcode 0xd3
 *
 * Group 2 shift/rotate with a word/dword/qword r/m destination (Ev) and the
 * shift count taken from the CL register:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,CL.  /6 is an invalid encoding (\#UD).
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects which shift/rotate operation to perform;
       pImpl then provides the 16/32/64-bit worker functions. */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are documented as undefined for these operations. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read/write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7988
/**
 * @opcode 0xd4
 *
 * aam Ib - ASCII adjust AX after multiply, with an explicit (usually 10)
 * divisor immediate.  Invalid in 64-bit mode; an immediate of zero raises
 * \#DE before the C implementation is invoked.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
}
8002
8003
/**
 * @opcode 0xd5
 *
 * aad Ib - ASCII adjust AX before division, with an explicit (usually 10)
 * base immediate.  Invalid in 64-bit mode.  Unlike AAM, a zero immediate is
 * legal here, so no \#DE check is needed.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
}
8015
8016
/**
 * @opcode 0xd6
 *
 * salc - set AL from carry (undocumented instruction): AL = CF ? 0xff : 0x00.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8035
8036
/**
 * @opcode 0xd7
 *
 * xlat - table lookup translation: AL = [DS:(r/e)BX + zero-extended AL].
 * One case per effective address size; each zero-extends AL into the address
 * width, adds (r/e)BX, fetches the byte and stores it back into AL.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8087
8088
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM / pending FPU exceptions before use; if either register is
 * empty a stack underflow is recorded instead of calling the worker.
 *
 * @param   bRm         Mod R/M byte; the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8118
8119
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW update only; no register is written and nothing is popped).
 *
 * @param   bRm         Mod R/M byte; the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register is involved in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8149
8150
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping the stack when done (compare-and-pop style).
 *
 * Identical to iemOpHlpFpuNoStore_st0_stN except the FSW update / underflow
 * handling also pops ST0.
 *
 * @param   bRm         Mod R/M byte; the r/m field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8180
8181
/** Opcode 0xd8 11/0.
 * fadd st0,stN - add STn to ST0, result stored in ST0 (common worker). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8188
8189
/** Opcode 0xd8 11/1.
 * fmul st0,stN - multiply ST0 by STn, result stored in ST0 (common worker). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8196
8197
/** Opcode 0xd8 11/2.
 * fcom st0,stN - compare ST0 with STn; only FSW condition codes change. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8204
8205
/** Opcode 0xd8 11/3.
 * fcomp st0,stN - like fcom (same assembly worker) but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8212
8213
/** Opcode 0xd8 11/4.
 * fsub st0,stN - subtract, result stored in ST0 (common worker). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8220
8221
/** Opcode 0xd8 11/5.
 * fsubr st0,stN - reversed-operand subtract, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8228
8229
/** Opcode 0xd8 11/6.
 * fdiv st0,stN - divide, result stored in ST0 (common worker). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8236
8237
/** Opcode 0xd8 11/7.
 * fdivr st0,stN - reversed-operand divide, result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8244
8245
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real value is fetched from memory first (may fault); the worker
 * is only called when ST0 is non-empty, otherwise a stack underflow is
 * recorded.
 *
 * @param   bRm         Mod R/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8281
8282
/** Opcode 0xd8 !11/0.
 * fadd st0,m32r - add a 32-bit real memory operand to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8289
8290
/** Opcode 0xd8 !11/1.
 * fmul st0,m32r - multiply ST0 by a 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8297
8298
/** Opcode 0xd8 !11/2.
 * fcom st0,m32r - compare ST0 with a 32-bit real memory operand; only FSW is
 * updated (with the memory operand's address recorded as FPU data pointer). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8330
8331
/** Opcode 0xd8 !11/3.
 * fcomp st0,m32r - same comparison worker as fcom st0,m32r, but the FSW
 * update / underflow handling also pops ST0. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8363
8364
/** Opcode 0xd8 !11/4.
 * fsub st0,m32r - subtract a 32-bit real memory operand from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
8371
8372
/** Opcode 0xd8 !11/5.
 * fsubr st0,m32r - reversed-operand subtract with a 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
8379
8380
/** Opcode 0xd8 !11/6.
 * fdiv st0,m32r - divide ST0 by a 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
8387
8388
/** Opcode 0xd8 !11/7.
 * fdivr st0,m32r - reversed-operand divide with a 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8395
8396
/**
 * @opcode 0xd8
 *
 * First x87 escape opcode.  Records the FPU opcode word (for FOP reporting),
 * then dispatches on the ModR/M reg field: register forms (mod == 3) operate
 * on ST0/STn, memory forms use a 32-bit real operand.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8436
8437
/** Opcode 0xd9 /0 mem32real
 * fld m32r - convert a 32-bit real from memory to 80-bit and push it onto the
 * FPU stack.  Pushing requires ST7 (the register that will become the new
 * top) to be empty, otherwise a stack push overflow is recorded.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8468
8469
/** Opcode 0xd9 !11/2 mem32real
 * fst m32r - store ST0 to memory as a 32-bit real (no pop).  The destination
 * is mapped before the conversion so a write fault is taken before any FPU
 * state changes; on stack underflow with IM masked a negative QNaN is stored
 * instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8503
8504
/** Opcode 0xd9 !11/3
 * fstp m32r - store ST0 to memory as a 32-bit real, then pop the FPU stack.
 * Same structure as fst m32r except for the THEN_POP FSW/underflow variants. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8538
8539
/** Opcode 0xd9 !11/4
 * fldenv m14/28byte - load the FPU environment from memory; the 14 vs 28 byte
 * layout is selected by the effective operand size passed to the C worker. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
8556
8557
8558/** Opcode 0xd9 !11/5 */
8559FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
8560{
8561 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
8562 IEM_MC_BEGIN(1, 1);
8563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8564 IEM_MC_ARG(uint16_t, u16Fsw, 0);
8565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8567 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8568 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8569 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8570 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
8571 IEM_MC_END();
8572}
8573
8574
/** Opcode 0xd9 !11/6
 * fnstenv m14/m28byte - store the FPU environment to memory without checking
 * for pending FPU exceptions (the no-wait form); 14 vs 28 byte layout is
 * selected by the effective operand size passed to the C worker. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
8591
8592
/** Opcode 0xd9 !11/7
 * fnstcw m2byte - store the x87 FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8609
8610
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * fnop - FPU no-operation; still raises \#NM / pending FPU exceptions and
 * updates the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8626
8627
/** Opcode 0xd9 11/0 stN
 * fld stN - push a copy of STn onto the FPU stack; an empty source records a
 * push-underflow instead. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8652
8653
/** Opcode 0xd9 11/3 stN
 * fxch stN - exchange ST0 with STn.  The register contents are swapped
 * in-line when both are non-empty; the underflow case is handed to a C
 * implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t,          uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8682
8683
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * fstp stN - copy ST0 to STn and pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence,
       so the iDstReg == 0 case is special-cased to a pure pop. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8730
8731
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * Records a stack underflow if ST0 is empty instead of calling the worker.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8759
8760
/** Opcode 0xd9 0xe0.
 * fchs st0 - change the sign of ST0 (unary worker). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
8767
8768
/** Opcode 0xd9 0xe1.
 * fabs st0 - absolute value of ST0 (unary worker). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
8775
8776
/** Opcode 0xd9 0xe4.
 * ftst st0 - compare ST0 with 0.0; only the FSW is updated, and an empty ST0
 * records a stack underflow. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8800
8801
/** Opcode 0xd9 0xe5.
 * fxam st0 - examine/classify ST0 into the FSW condition codes.  Unlike most
 * ST0 operations this also works on an empty register (which fxam classifies),
 * hence the unconditional IEM_MC_REF_FPUREG instead of a NOT_EMPTY check. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8822
8823
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack
 * (FLD1, FLDL2T, FLDL2E, FLDPI, FLDLG2, FLDLN2, FLDZ).
 *
 * The constant is produced by the assembly helper and pushed as the new ST(0);
 * if the target register (current TOP - 1, i.e. relative register 7) is not
 * empty, a stack push overflow is signalled instead.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Relative register 7 is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8849
8850
/** Opcode 0xd9 0xe8 - FLD1: push +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
8857
8858
/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
8865
8866
/** Opcode 0xd9 0xea - FLDL2E: push log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
8873
/** Opcode 0xd9 0xeb - FLDPI: push pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
8880
8881
/** Opcode 0xd9 0xec - FLDLG2: push log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
8888
/** Opcode 0xd9 0xed - FLDLN2: push ln(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
8895
8896
/** Opcode 0xd9 0xee - FLDZ: push +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
8903
8904
/** Opcode 0xd9 0xf0 - F2XM1: replace ST(0) with 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
8918
8919
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Used by FYL2X, FPATAN, FYL2XP1 (all with STn = ST1).
 *
 * @param   bRm         Mod R/M byte (the r/m field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is STn (destination), operand 2 is ST0; both must be non-empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        /* The store-then-pop macro handles keeping the stack unpopped on IE/DE/ZE. */
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8950
8951
/** Opcode 0xd9 0xf1 - FYL2X: ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
8958
8959
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack (FPTAN, FXTRACT, FSINCOS).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* The helper fills a two-value result; one replaces ST0, one is pushed. */
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8987
8988
/** Opcode 0xd9 0xf2 - FPTAN: partial tangent; replaces ST(0) and pushes a
 *  second result onto the stack. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
8995
8996
/** Opcode 0xd9 0xf3 - FPATAN: partial arctangent into ST(1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
9003
9004
/** Opcode 0xd9 0xf4 - FXTRACT: split ST(0) into exponent and significand
 *  (replaces ST(0) and pushes the second part). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
9011
9012
/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder of ST(0) / ST(1),
 *  stored in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9019
9020
/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU stack TOP pointer without
 *  touching any register contents or tag bits. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    /* FSW C-flags forced to 0 (see note above); also records the FPU opcode. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9041
9042
/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU stack TOP pointer without
 *  touching any register contents or tag bits. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* FSW C-flags forced to 0 (see note above); also records the FPU opcode. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9063
9064
/** Opcode 0xd9 0xf8 - FPREM: partial remainder (truncating) of ST(0) / ST(1),
 *  stored in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
9071
9072
/** Opcode 0xd9 0xf9 - FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
9079
9080
/** Opcode 0xd9 0xfa - FSQRT: replace ST(0) with its square root. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9087
9088
/** Opcode 0xd9 0xfb - FSINCOS: sine into ST(0), cosine pushed on top. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9095
9096
/** Opcode 0xd9 0xfc - FRNDINT: round ST(0) to an integer (per FCW rounding
 *  control) in place. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9103
9104
/** Opcode 0xd9 0xfd - FSCALE: scale ST(0) by 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9111
9112
/** Opcode 0xd9 0xfe - FSIN: replace ST(0) with its sine. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9119
9120
/** Opcode 0xd9 0xff - FCOS: replace ST(0) with its cosine. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9127
9128
/** Used by iemOp_EscF1.
 *  Dispatch table for 0xd9 register-form opcodes 0xe0..0xff; the index is
 *  simply (bRm - 0xe0), so every slot must be filled (invalid encodings use
 *  iemOp_Invalid). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
9165
9166
/**
 * @opcode      0xd9
 *
 * FPU escape 0xd9 decoder: splits on register vs. memory form of the ModR/M
 * byte, then on the reg field.  The 11-bit FPU opcode (ModR/M + low 3 bits of
 * the escape byte) is recorded up front for FSW/FOP bookkeeping.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* Only 0xd9 0xd0 (FNOP) is valid in this group. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* reg >= 4 implies bRm >= 0xe0, so the table index is in range. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9211
9212
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST(0) when CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; only ST(i) is actually read. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9237
9238
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST(0) when ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; only ST(i) is actually read. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9263
9264
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST(0) when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; only ST(i) is actually read. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9289
9290
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST(0) when PF is set
 *  (PF = "unordered" after FCOMI-style compares). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; only ST(i) is actually read. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9315
9316
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done (FUCOMPP, FCOMPP).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly);
 *                      receives the two register values and returns an FSW.
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        /* Compare only: the helper writes FSW, no register is stored. */
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9346
9347
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare of ST(0) with ST(1),
 *  then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9354
9355
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0 (FIADD, FIMUL, FISUB, FISUBR, FIDIV, FIDIVR).
 *
 * @param   bRm         Mod R/M byte (memory form; selects the effective address).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* The memory operand is fetched before the stack-empty check so memory
       faults are delivered first. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9391
9392
/** Opcode 0xda !11/0 - FIADD m32i: ST(0) += (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
9399
9400
/** Opcode 0xda !11/1 - FIMUL m32i: ST(0) *= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
9407
9408
/** Opcode 0xda !11/2 - FICOM m32i: compare ST(0) with an int32 from memory,
 *  setting only the FSW condition flags. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* FDP/FDS get the memory operand recorded alongside the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9440
9441
/** Opcode 0xda !11/3 - FICOMP m32i: like FICOM m32i but pops ST(0) afterwards
 *  (shares iemAImpl_ficom_r80_by_i32; only the pop differs). */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9473
9474
/** Opcode 0xda !11/4 - FISUB m32i: ST(0) -= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
9481
9482
/** Opcode 0xda !11/5 - FISUBR m32i: ST(0) = (int32 from memory) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
9489
9490
/** Opcode 0xda !11/6 - FIDIV m32i: ST(0) /= (int32 from memory). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
9497
9498
/** Opcode 0xda !11/7 - FIDIVR m32i: ST(0) = (int32 from memory) / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
9505
9506
/**
 * @opcode      0xda
 *
 * FPU escape 0xda decoder: register forms are FCMOVcc (and FUCOMPP at 0xe9),
 * memory forms are the m32i integer-arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* Only 0xda 0xe9 (FUCOMPP) is valid in this group. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9548
9549
/** Opcode 0xdb !11/0 - FILD m32i: load an int32 from memory, convert to R80
 *  and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Relative register 7 is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9580
9581
/** Opcode 0xdb !11/1 - FISTTP m32i: store ST(0) to memory as int32 using
 *  truncation (chop) regardless of the FCW rounding mode, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before looking at the stack so memory
       faults are delivered first. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9615
9616
/** Opcode 0xdb !11/2 - FIST m32i: store ST(0) to memory as int32 (rounded per
 *  FCW); the stack is not popped. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before looking at the stack so memory
       faults are delivered first. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9650
9651
/** Opcode 0xdb !11/3 - FISTP m32i: like FIST m32i but pops ST(0) afterwards
 *  (shares iemAImpl_fist_r80_to_i32; only the pop differs). */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before looking at the stack so memory
       faults are delivered first. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9685
9686
/** Opcode 0xdb !11/5 - FLD m80r: load an 80-bit real from memory and push it
 *  onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Relative register 7 is the slot the push will land in. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9717
9718
/** Opcode 0xdb !11/7 - FSTP m80r: store ST(0) to memory as an 80-bit real,
 *  then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte destination, so the extended-map variant with explicit size and
       alignment mask is used here. */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9752
9753
/** Opcode 0xdb 11/0 - FCMOVNB: copy ST(i) to ST(0) when CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; only ST(i) is actually read. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9778
9779
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i): copies ST(i) to ST(0) when EFLAGS.ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9804
9805
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i): copies ST(i) to ST(0) when both EFLAGS.CF and
 * EFLAGS.ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9830
9831
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i): copies ST(i) to ST(0) when EFLAGS.PF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty; otherwise signal stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9856
9857
/** Opcode 0xdb 0xe0.
 * FNENI: 8087-only interrupt-enable instruction; on later FPUs it only
 * raises \#NM if applicable and is otherwise a no-op. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9868
9869
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087-only interrupt-disable instruction; on later FPUs it only
 * raises \#NM if applicable and is otherwise a no-op. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9880
9881
/** Opcode 0xdb 0xe2.
 * FNCLEX: clears the FPU exception flags in FSW without checking for
 * pending exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9894
9895
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitializes the FPU; deferred to the C implementation with
 * fCheckXcpts=false (no-wait form, pending exceptions are not checked). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
9903
9904
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287-only "set protected mode" instruction; on later FPUs it
 * only raises \#NM if applicable and is otherwise a no-op. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9915
9916
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL-only "return from protected mode" instruction; emulated
 * as \#UD since that is the behavior of newer CPUs (see the disabled
 * no-op variant below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
9932
9933
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i): unordered compare setting EFLAGS; deferred to the C
 * implementation. The third argument packs fPop (0 = no pop) together with
 * the current FPU opcode word. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
9942
9943
9944/** Opcode 0xdb 11/6. */
9945FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
9946{
9947 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
9948 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
9949 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
9950 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
9951}
9952
9953
/**
 * @opcode 0xdb
 *
 * Escape opcode 0xdb decoder.  Register forms dispatch to FCMOVcc,
 * FNENI/FNDISI/FNCLEX/FNINIT/FNSETPM/FRSTPM and F(U)COMI; memory forms
 * dispatch to FILD/FISTTP/FIST(P) m32i and FLD/FSTP m80fp.
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xdb + ModRM) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 encodes individual control instructions in the r/m field. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10005
10006
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Raises \#NM / FPU exceptions as appropriate; on stack underflow (either
 * operand register empty) the underflow handling writes to ST(i) instead.
 *
 * @param   bRm     Mod R/M byte; the r/m field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Value1 = ST(i), value2 = ST(0); result goes back into ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10037
10038
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0): adds ST(0) to ST(i), storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10045
10046
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0): multiplies ST(i) by ST(0), storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10053
10054
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0): reversed subtract, storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10061
10062
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0): subtract, storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10069
10070
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0): reversed divide, storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10077
10078
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0): divide, storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10085
10086
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Fetches the m64 operand after decode/exception checks, then invokes the
 * assembly worker if ST(0) is non-empty; otherwise records a stack underflow
 * against ST(0) with the memory operand address.
 *
 * @param   bRm     Mod R/M byte (memory form).
 * @param   pfnImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10121
10122
/** Opcode 0xdc !11/0.
 * FADD m64fp: adds the 64-bit float memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10129
10130
/** Opcode 0xdc !11/1.
 * FMUL m64fp: multiplies ST(0) by the 64-bit float memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10137
10138
/** Opcode 0xdc !11/2.
 * FCOM m64fp: compares ST(0) with the 64-bit float memory operand, setting
 * the FSW condition codes only (no stack change). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10170
10171
/** Opcode 0xdc !11/3.
 * FCOMP m64fp: compares ST(0) with the 64-bit float memory operand, setting
 * the FSW condition codes, then pops the stack. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10203
10204
/** Opcode 0xdc !11/4.
 * FSUB m64fp: subtracts the 64-bit float memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10211
10212
/** Opcode 0xdc !11/5.
 * FSUBR m64fp: reversed subtract with the 64-bit float memory operand,
 * result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10219
10220
/** Opcode 0xdc !11/6.
 * FDIV m64fp: divides ST(0) by the 64-bit float memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10227
10228
/** Opcode 0xdc !11/7.
 * FDIVR m64fp: reversed divide with the 64-bit float memory operand,
 * result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10235
10236
/**
 * @opcode 0xdc
 *
 * Escape opcode 0xdc decoder.  Register forms are the arithmetic
 * instructions targeting ST(i); memory forms take a 64-bit float operand
 * and target ST(0).
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xdc + ModRM) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10275
10276
/** Opcode 0xdd !11/0.
 * FLD m64fp: converts the 64-bit float memory operand to 80-bit and pushes
 * it onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to TOP is the one that becomes the new ST(0);
       if it is occupied this is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10307
10308
/** Opcode 0xdd !11/1.
 * FISTTP m64i: stores ST(0) to a 64-bit integer in memory using truncation,
 * then pops the stack.  (Original comment said !11/0; the 0xdd decoder
 * dispatches this from /1.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10342
10343
/** Opcode 0xdd !11/2.
 * FST m64fp: stores ST(0) to a 64-bit float in memory without popping.
 * (Original comment said !11/0; the 0xdd decoder dispatches this from /2.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU changes. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked, store the negative QNaN indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10377
10378
10379
10380
/** Opcode 0xdd !11/3.
 * FSTP m64fp: stores ST(0) to a 64-bit float in memory, then pops the stack.
 * (Original comment said !11/0; the 0xdd decoder dispatches this from /3.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU changes. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked, store the negative QNaN indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10414
10415
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restores the full FPU state from memory; deferred to
 * the C implementation.  (Original comment said !11/0; the 0xdd decoder
 * dispatches this from /4.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10432
10433
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: saves the full FPU state to memory; deferred to the C
 * implementation.  (Original comment said !11/0; the 0xdd decoder dispatches
 * this from /6.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10450
/** Opcode 0xdd !11/7.
 * FNSTSW m16: stores the FPU status word to memory.  Note that only \#NM is
 * checked here — as a no-wait instruction it does not test for pending FPU
 * exceptions.  (Original comment said !11/0; the 0xdd decoder dispatches
 * this from /7.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
10474
10475
/** Opcode 0xdd 11/0.
 * FFREE ST(i): marks the given stack register as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10495
10496
/** Opcode 0xdd 11/2.
 * FST ST(i): copies ST(0) to ST(i).  (Original comment said 11/1; the 0xdd
 * decoder dispatches this from /2.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Package ST(0) as a result with a zero FSW and store it in ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10519
10520
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i): unordered compare, FSW condition codes only.
 * (Original comment said 11/3; the 0xdd decoder dispatches this from /4.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
10527
10528
/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i): unordered compare, then pops the stack.  (Original
 * comment said 11/4; the 0xdd decoder dispatches this from /5.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
10535
10536
/**
 * @opcode 0xdd
 *
 * Escape opcode 0xdd decoder.  Register forms: FFREE/FST/FSTP/FUCOM(P) on
 * ST(i); memory forms: FLD/FST(P) m64fp, FISTTP m64i, FRSTOR, FNSAVE and
 * FNSTSW m16.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xdd + ModRM) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10575
10576
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): adds ST(0) to ST(i), stores to ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
10583
10584
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): multiplies ST(i) by ST(0), stores to ST(i) and pops.
 * (Original comment said 11/0, a copy/paste slip; FMULP is the /1 form.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
10591
10592
/** Opcode 0xde 0xd9.
 * FCOMPP: compares ST(0) with ST(1), then pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
10599
10600
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): reversed subtract, stores to ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
10607
10608
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): subtract, stores to ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
10615
10616
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): reversed divide, stores to ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
10623
10624
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): divide, stores to ST(i) and pops. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
10631
10632
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Fetches the 16-bit signed integer memory operand, then invokes the assembly
 * worker if ST(0) is non-empty; otherwise records a stack underflow against
 * ST(0).
 *
 * @param   bRm     Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10668
10669
/** Opcode 0xde !11/0.
 * FIADD m16i: adds the 16-bit signed integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
10676
10677
/** Opcode 0xde !11/1.
 * FIMUL m16i: multiplies ST(0) by the 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
10684
10685
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    /* FICOM m16int: compare ST0 with a 16-bit integer from memory, updating
       only FSW (no data result, no pop). */
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* Commit the FSW produced by the comparison, recording the memory operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10717
10718
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    /* FICOMP m16int: same as FICOM m16int but pops ST0 afterwards (note the
       *_THEN_POP variants in both the success and underflow paths). */
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10750
10751
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    /* FISUB m16int: defer to the common ST0/m16i worker with the fisub helper. */
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
10758
10759
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    /* FISUBR m16int: reversed subtract variant, same common worker. */
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
10766
10767
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    /* FIDIV m16int: defer to the common ST0/m16i worker with the fidiv helper. */
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
10774
10775
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    /* FIDIVR m16int: reversed divide variant, same common worker. */
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
10782
10783
/**
 * @opcode 0xde
 *
 * FPU escape byte 0xde: dispatches on the ModR/M byte.  Register forms
 * (mod=3) are the *P pop variants operating on ST(i),ST(0); memory forms
 * are the 16-bit-integer arithmetic instructions.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 escape bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)        /* DE D9 is the only valid /3 encoding: FCOMPP. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10824
10825
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Mark ST(i) as empty, then increment TOP (the 'pop' half), and update
       FOP/FIP like any other FPU instruction. */
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10845
10846
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    /* FNSTSW AX: copy the FPU status word into AX; read-only FPU access,
       no pending-exception check (the 'no-wait' form). */
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10861
10862
/** Opcode 0xdf 11/5. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    /* FUCOMIP ST0,ST(i): compare into EFLAGS and pop.  Bit 31 of the last
       parameter encodes fPop; the low bits carry the FPU opcode.
       NOTE(review): dispatches to iemAImpl_fcomi_r80_by_r80, same as FCOMIP
       below — presumably the ordered/unordered distinction is handled
       elsewhere or intentionally ignored; confirm against the aimpl. */
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10871
10872
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    /* FCOMIP ST0,ST(i): compare into EFLAGS and pop (bit 31 = fPop flag,
       low bits = FPU opcode). */
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10881
10882
/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    /* FILD m16int: load a 16-bit signed integer, convert to R80 and push.
       Pushing requires the register that will become the new top (ST7
       relative to current TOP) to be empty, else stack overflow. */
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10913
10914
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    /* FISTTP m16int (SSE3): store ST0 as int16 with truncation, then pop.
       The destination is mapped for write up front; on empty ST0, an
       'integer indefinite' (INT16_MIN) is stored only if the invalid-op
       exception is masked (FCW.IM). */
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is conditional on the FSW the aimpl produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10948
10949
/** Opcode 0xdf !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    /* FIST m16int: store ST0 as int16 (rounding per FCW.RC), no pop.
       On empty ST0 the 'integer indefinite' is stored only if FCW.IM. */
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10983
10984
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    /* FISTP m16int: identical to FIST m16int above except both result paths
       use the *_THEN_POP FSW updaters, popping ST0. */
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11018
11019
/** Opcode 0xdf !11/4. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    /* FBLD m80bcd: load an 80-bit packed-BCD value, convert to R80 and push. */
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push semantics: the register becoming the new TOP (rel. ST7) must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11050
11051
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    /* FILD m64int: load a 64-bit signed integer, convert to R80 and push. */
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11082
11083
/** Opcode 0xdf !11/6. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    /* FBSTP m80bcd: store ST0 as 80-bit packed BCD and pop.  Uses the
       extended mapping macro to pass an explicit size/alignment for the
       10-byte destination.  On empty ST0 an indefinite BCD value is stored
       only if FCW.IM is set. */
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11117
11118
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    /* FISTP m64int: store ST0 as int64 (rounding per FCW.RC) and pop.
       On empty ST0 the 'integer indefinite' (INT64_MIN) is stored only if
       FCW.IM is set. */
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11152
11153
/**
 * @opcode 0xdf
 *
 * FPU escape byte 0xdf: dispatches on the ModR/M byte.  Register forms
 * include the undocumented FFREEP and the reserved-but-implemented
 * FXCH/FSTP aliases; memory forms are the integer/BCD load/store group.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 escape bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)        /* DF E0 is the only valid /4 encoding: FNSTSW AX. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i,   bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i,  bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d,   bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i,   bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11194
11195
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ rel8: decrement the count register ([ER]CX per effective
 * address size) and branch if it is non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX, ECX or RCX as the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11246
11247
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ rel8: decrement the count register ([ER]CX per effective
 * address size) and branch if it is non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX, ECX or RCX as the counter. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11298
11299
/**
 * @opcode 0xe2
 *
 * LOOP rel8: decrement the count register ([ER]CX per effective address
 * size) and branch if it is non-zero.  Contains a logging-only shortcut
 * for the LOOP $-2 busy-wait idiom.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Shortcut only when verbose logging is active and the branch targets the
       instruction itself (self-loop): just zero the counter and fall through. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* Regular path: decrement the counter, branch while non-zero. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11390
11391
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ rel8: branch if the count register (selected by the
 * effective address size) is zero.  Does not modify the counter.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Condition is inverted vs. LOOP: fall through when non-zero. */
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11439
11440
/** Opcode 0xe4 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    /* IN AL,imm8: one byte from immediate port; deferred to the C
       implementation (may VM-exit).  0x80 marks the port as immediate. */
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11449
11450
/** Opcode 0xe5 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    /* IN eAX,imm8: 2 or 4 bytes depending on operand size; immediate port. */
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11460
11461
/** Opcode 0xe6 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    /* OUT imm8,AL: one byte to immediate port; deferred to the C impl. */
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11470
11471
/** Opcode 0xe7 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    /* OUT imm8,eAX: 2 or 4 bytes depending on operand size; immediate port. */
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11481
11482
/**
 * @opcode 0xe8
 *
 * CALL rel16/rel32: near relative call, deferred to the operand-size
 * specific C implementation.  In 64-bit mode rel32 is sign-extended.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* Fetches a 32-bit displacement and sign-extends it to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11513
11514
/**
 * @opcode 0xe9
 *
 * JMP rel16/rel32: near relative jump.  The 32-bit and 64-bit operand
 * sizes share the rel32 decode path.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11548
11549
/**
 * @opcode 0xea
 *
 * JMP ptr16:16 / ptr16:32: direct far jump.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
11570
11571
/**
 * @opcode 0xeb
 *
 * JMP rel8: short relative jump, sign-extended displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
11586
11587
/** Opcode 0xec */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    /* IN AL,DX: one byte from the port in DX; deferred to the C impl. */
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11595
11596
/** Opcode 0xed */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    /* IN eAX,DX: 2 or 4 bytes depending on operand size, port in DX. */
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11606
11607
/** Opcode 0xee - out DX,AL: write AL to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* cbReg=1 selects the byte-sized access in the common out-DX,eAX worker. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11615
11616
/** Opcode 0xef - out DX,eAX: write AX/EAX to the I/O port in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* The operand size prefix selects between a 2 and a 4 byte port access. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11626
11627
/**
 * @opcode 0xf0
 *
 * LOCK prefix - records IEM_OP_PRF_LOCK and continues decoding the
 * instruction it applies to.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) /* execution mode may ask us to ignore LOCK entirely */
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Fetch the next opcode byte and dispatch it with the prefix recorded. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11640
11641
/**
 * @opcode 0xf1
 *
 * INT1 / ICEBP - raises \#DB via the common software interrupt implementation.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
11657
11658
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix - records IEM_OP_PRF_REPNZ and continues decoding.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes.  (Index 3 = 0xf2 entry.) */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11676
11677
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix - records IEM_OP_PRF_REPZ and continues decoding.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes.  (Index 2 = 0xf3 entry.) */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11695
11696
/**
 * @opcode 0xf4
 *
 * HLT - deferred to the C implementation; ends the current translation block.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
}
11706
11707
/**
 * @opcode 0xf5
 *
 * CMC - complement the carry flag.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11720
11721
/**
 * Body for 'inc/dec/not/neg Eb'.
 *
 * Covers the register form plus the memory form in both its plain
 * (@a a_fnNormalU8) and LOCK prefixed (@a a_fnLockedU8) variants.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefixed - same shape, but calls the locked worker. */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
11780
11781
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles the register forms and the non-LOCK memory forms.  Note that it
 * deliberately leaves the LOCK-prefixed else-branch open; it must be
 * immediately followed by IEMOP_BODY_UNARY_Ev_LOCKED, which supplies the
 * locked memory forms and closes the scopes.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
11902
/**
 * Body for the LOCK prefixed memory forms of 'inc/dec/not/neg Ev'; completes
 * the open scopes left by IEMOP_BODY_UNARY_Ev, which must precede it.
 *
 * This path is only reached when a LOCK prefix is present (else-branch of the
 * IEM_OP_PRF_LOCK test in IEMOP_BODY_UNARY_Ev), so decoding must finish with
 * IEMOP_HLP_DONE_DECODING() - the NO_LOCK_PREFIX variant would raise \#UD and
 * break 'lock inc/dec/not/neg Ev' (cf. the locked path of
 * IEMOP_BODY_UNARY_Eb).
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
11965
11966
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * TEST Eb,Ib - ANDs without storing the result; only EFLAGS are updated
 * (the memory operand is therefore mapped/committed read-only).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of the immediate that follows */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12015
12016
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common body for the byte-sized mul/imul/div/idiv; @a pfnU8 is the assembly
 * worker operating on AX.  A non-zero status from the worker raises \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* worker reported overflow / divide by zero */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* worker reported overflow / divide by zero */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12067
12068
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common body for mul/imul/div/idiv Ev; @a pImpl supplies the operand-size
 * specific assembly workers operating on AX/DX, EAX/EDX or RAX/RDX.  A
 * non-zero status from the worker raises \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero the high halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero the high halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12251
12252
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * NOT Eb - one's complement; EFLAGS are untouched by the worker.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12262
12263
12264/**
12265 * @opmaps grp3_f6
12266 * @opcode /3
12267 */
12268FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12269{
12270 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12271 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12272}
12273
12274
/**
 * @opcode 0xf6
 *
 * Group 3, byte operand: dispatches on the ModR/M reg field.  /1 is treated
 * as an alias of /0 (test).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm); /* /1 alias of /0 */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12306
12307
/** Opcode 0xf7 /0.
 *
 * TEST Ev,Iv - ANDs without storing the result; only EFLAGS are updated
 * (memory operand mapped/committed read-only).  In 64-bit mode the immediate
 * is a sign-extended 32-bit value.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = size of the immediate that follows */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the immediate that follows */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of the (imm32) immediate that follows */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12449
12450
/** Opcode 0xf7 /2 - not Ev (one's complement). */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    /* The two body macros form a single construct; _LOCKED completes the
       scopes that IEMOP_BODY_UNARY_Ev leaves open. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
12458
12459
/** Opcode 0xf7 /3 - neg Ev (two's complement negation). */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    /* The two body macros form a single construct; _LOCKED completes the
       scopes that IEMOP_BODY_UNARY_Ev leaves open. */
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
12467
12468
/**
 * @opcode 0xf7
 *
 * Group 3, word/dword/qword operand: dispatches on the ModR/M reg field.
 * /1 is treated as an alias of /0 (test).
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm); /* /1 alias of /0 */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12500
12501
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12514
12515
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12528
12529
/**
 * @opcode 0xfa
 *
 * CLI - deferred to the C implementation (IOPL/VME checks live there).
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_cli);
}
12539
12540
/**
 * @opcode 0xfb
 *
 * STI - deferred to the C implementation (IOPL/VME checks live there).
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
12547
12548
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12561
12562
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12575
12576
/**
 * @opmaps grp4
 * @opcode /0
 *
 * INC Eb - byte increment; the shared unary-Eb body supplies both the plain
 * and the LOCK-prefixed (atomic) assembly worker.
 *
 * @param   bRm     The ModR/M byte (already fetched by the group dispatcher).
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
12586
12587
/**
 * @opmaps grp4
 * @opcode /1
 *
 * DEC Eb - byte decrement; the shared unary-Eb body supplies both the plain
 * and the LOCK-prefixed (atomic) assembly worker.
 *
 * @param   bRm     The ModR/M byte (already fetched by the group dispatcher).
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
12597
12598
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (inc Eb) and /1 (dec Eb) are defined; all other
 * reg-field encodings raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
12615
/** Opcode 0xff /0.
 * INC Ev - word/dword/qword increment; the shared unary-Ev bodies supply the
 * plain workers and the LOCK-prefixed (atomic) workers per operand size. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
12623
12624
/** Opcode 0xff /1.
 * DEC Ev - word/dword/qword decrement; the shared unary-Ev bodies supply the
 * plain workers and the LOCK-prefixed (atomic) workers per operand size. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
12632
12633
/**
 * Opcode 0xff /2.
 *
 * CALL Ev - near indirect call.  The target RIP is fetched either from a
 * general-purpose register or from memory, depending on the ModR/M mode,
 * and the actual call (stack push, RIP update) is done by the per-size
 * iemCImpl_call_NN worker, flagged as an indirect branch.
 *
 * @param   bRm     The ModR/M byte (already fetched by the group dispatcher).
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    /* In 64-bit mode the default operand size is 64-bit; Intel ignores a 0x66 prefix. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12720
/**
 * Common body for the far-pointer group 5 encodings, 0xff /3 (callf Ep) and
 * 0xff /5 (jmpf Ep).
 *
 * Loads a far pointer (16-bit selector + 16/32/64-bit offset) from memory and
 * hands it to @a a_fnCImpl together with the effective operand size.  Register
 * operands raise \#UD since there is no register form of a far pointer.
 *
 * @param   a_bRm       The ModR/M byte.
 * @param   a_fnCImpl   The C implementation doing the far transfer
 *                      (iemCImpl_callf or iemCImpl_FarJmp).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory: offset first, then the selector at the disp. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
12788
12789
/**
 * Opcode 0xff /3.
 *
 * CALLF Ep - far indirect call via a selector:offset pair in memory; shares
 * IEMOP_BODY_GRP5_FAR_EP with jmpf (0xff /5).
 *
 * @param   bRm     The ModR/M byte (already fetched by the group dispatcher).
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
}
12799
12800
/**
 * Opcode 0xff /4.
 *
 * JMP Ev - near indirect jump.  The target RIP is fetched either from a
 * general-purpose register or from memory, then RIP is set directly via the
 * IEM_MC_SET_RIP_UNN_AND_FINISH microcode (no C implementation needed).
 *
 * @param   bRm     The ModR/M byte (already fetched by the group dispatcher).
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    /* In 64-bit mode the default operand size is 64-bit; Intel ignores a 0x66 prefix. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12887
12888
/**
 * Opcode 0xff /5.
 *
 * JMPF Ep - far indirect jump via a selector:offset pair in memory; shares
 * IEMOP_BODY_GRP5_FAR_EP with callf (0xff /3).
 *
 * @param   bRm     The ModR/M byte (already fetched by the group dispatcher).
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
}
12898
12899
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev - register operands go through the common push-GReg worker; memory
 * operands are fetched and pushed inline here, per operand size.
 *
 * @param   bRm     The ModR/M byte (already fetched by the group dispatcher).
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* pushes default to 64-bit operand size in long mode */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12955
12956
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher: routes on the ModR/M reg field; /7 is undefined and
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
        case 1:
            return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    AssertFailedReturn(VERR_IEM_IPE_3); /* unreachable: reg field is 3 bits */
}
12985
12986
12987
/**
 * The one-byte opcode decoder map, indexed by the opcode byte.
 * Declared extern at the top of this file so the other decoder files can
 * forward to it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
13055
13056
13057/** @} */
13058
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette