VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 100764

Last change on this file since 100764 was 100761, checked in by vboxsync, 17 months ago

VMM/IEM: Check for IRQs every so often, especially after sti, popf and iret. Increased the hash table size. Disabled some debug code. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 456.2 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 100761 2023-08-01 02:24:11Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Emits the register-destination form completely, but the memory form only up
 * to the LOCK-prefix branch; the macro deliberately ends with unbalanced
 * braces inside the "LOCK prefix present" else-arm.
 *
 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED,
 * exactly one of which MUST follow to close the open braces.
 *
 * @param   a_fnNormalU8    The non-LOCK assembly worker (dst, src, eflags).
 * @param   a_fRW           IEM_ACCESS_DATA_RW or IEM_ACCESS_DATA_R (the latter
 *                          for TEST/CMP which only read the destination).
 */
#define IEMOP_BODY_BINARY_rm_r8(a_fnNormalU8, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
117
/**
 * Closer for IEMOP_BODY_BINARY_rm_r8 for instructions that do not permit the
 * LOCK prefix (TEST, CMP): raises the invalid-lock-prefix exception and
 * supplies the two closing braces the opener left dangling.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
124
/**
 * Closer for IEMOP_BODY_BINARY_rm_r8 for instructions that allow a LOCK
 * prefix: emits the locked memory form and supplies the two closing braces the
 * opener left dangling.
 *
 * @param   a_fnLockedU8    The locked assembly worker (dst, src, eflags).
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            /* Locked form always maps read-write. */ \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
146
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * Self-contained (no closer macro needed): the destination is always a
 * register, so no LOCK prefix handling and no memory mapping of the dst.
 *
 * @param   a_fnNormalU8    The assembly worker (dst, src, eflags).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        /* Note: reg is the destination here, rm the source (reverse of rm_r8). */ \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
195
196
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Emits the register-destination form completely, but the memory form only up
 * to the LOCK-prefix branch; the macro deliberately ends with unbalanced
 * braces inside the "LOCK prefix present" else-arm, so it MUST be followed by
 * either IEMOP_BODY_BINARY_rm_rv_NO_LOCK or IEMOP_BODY_BINARY_rm_rv_LOCKED.
 *
 * @param   a_fnNormalU16   The 16-bit non-LOCK assembly worker.
 * @param   a_fnNormalU32   The 32-bit non-LOCK assembly worker.
 * @param   a_fnNormalU64   The 64-bit non-LOCK assembly worker.
 * @param   a_fRW           IEM_ACCESS_DATA_RW, or IEM_ACCESS_DATA_R for
 *                          TEST/CMP which don't write the destination.
 */
#define IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                /* 32-bit writes zero the high dword of a 64-bit GPR; TEST/CMP \
                   (a_fRW == IEM_ACCESS_DATA_R) don't write and must not clear. */ \
                if ((a_fRW) == IEM_ACCESS_DATA_RW) /* not TEST and CMP */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
341
/**
 * Closer for IEMOP_BODY_BINARY_rm_rv for instructions that do not permit the
 * LOCK prefix (TEST, CMP): raises the invalid-lock-prefix exception and
 * supplies the closing braces the opener left dangling.
 */
#define IEMOP_BODY_BINARY_rm_rv_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
348
/**
 * Closer for IEMOP_BODY_BINARY_rm_rv for instructions that allow a LOCK
 * prefix: emits the locked memory forms for all three operand sizes and
 * supplies the closing braces the opener left dangling.
 *
 * @param   a_fnLockedU16   The 16-bit locked assembly worker.
 * @param   a_fnLockedU32   The 32-bit locked assembly worker.
 * @param   a_fnLockedU64   The 64-bit locked assembly worker.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
417
418
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Note! Deliberately ends without a semicolon after IEM_MC_END() so the
 *       invocation site supplies it.
 *
 * @param   a_fnNormalU8    The assembly worker (dst, src, eflags).
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
438
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * Note! The 64-bit form sign-extends a 32-bit immediate, matching the
 *       instruction encoding (IEM_OPCODE_GET_NEXT_S32_SX_U64).
 * NOTE(review): the cases carry no break after IEM_MC_END(); presumably
 *       IEM_MC_ADVANCE_RIP_AND_FINISH() leaves the function — confirm against
 *       the IEM_MC definitions.
 *
 * @param   a_fnNormalU16       The 16-bit assembly worker.
 * @param   a_fnNormalU32       The 32-bit assembly worker.
 * @param   a_fnNormalU64       The 64-bit assembly worker.
 * @param   a_fModifiesDstReg   Non-zero if the destination register is written
 *                              (zero for TEST/CMP), controls high-dword
 *                              clearing in 32-bit mode.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            /* 32-bit writes zero the high dword of RAX; skipped for TEST/CMP. */ \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
505
506
507
508/* Instruction specification format - work in progress: */
509
510/**
511 * @opcode 0x00
512 * @opmnemonic add
513 * @op1 rm:Eb
514 * @op2 reg:Gb
515 * @opmaps one
516 * @openc ModR/M
517 * @opflmodify cf,pf,af,zf,sf,of
518 * @ophints harmless ignores_op_sizes
519 * @opstats add_Eb_Gb
520 * @opgroup og_gen_arith_bin
521 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
522 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
523 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
524 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
525 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The two macros form one statement: _rm_r8 leaves open braces that _LOCKED closes. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
532
533
534/**
535 * @opcode 0x01
536 * @opgroup og_gen_arith_bin
537 * @opflmodify cf,pf,af,zf,sf,of
538 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
539 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
540 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
541 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
542 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The two macros form one statement: _rm_rv leaves open braces that _LOCKED closes. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
549
550
551/**
552 * @opcode 0x02
553 * @opgroup og_gen_arith_bin
554 * @opflmodify cf,pf,af,zf,sf,of
555 * @opcopytests iemOp_add_Eb_Gb
556 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb, Eb: register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
562
563
564/**
565 * @opcode 0x03
566 * @opgroup og_gen_arith_bin
567 * @opflmodify cf,pf,af,zf,sf,of
568 * @opcopytests iemOp_add_Ev_Gv
569 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file; the trailing
       1 presumably means "modifies destination register" — confirm at definition. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
575
576
577/**
578 * @opcode 0x04
579 * @opgroup og_gen_arith_bin
580 * @opflmodify cf,pf,af,zf,sf,of
581 * @opcopytests iemOp_add_Eb_Gb
582 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, Ib: fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
588
589
590/**
591 * @opcode 0x05
592 * @opgroup og_gen_arith_bin
593 * @opflmodify cf,pf,af,zf,sf,of
594 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
595 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
596 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
597 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
598 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, Iz: operand-size dependent immediate form; 1 = modifies dst reg. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
604
605
606/**
607 * @opcode 0x06
608 * @opgroup og_stack_sreg
609 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES — invalid in 64-bit mode (raises #UD via IEMOP_HLP_NO_64BIT). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
616
617
618/**
619 * @opcode 0x07
620 * @opgroup og_stack_sreg
621 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES — invalid in 64-bit mode; defers to the C implementation since
       loading a segment register can change the execution mode (IEM_CIMPL_F_MODE). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
629
630
631/**
632 * @opcode 0x08
633 * @opgroup og_gen_arith_bin
634 * @opflmodify cf,pf,af,zf,sf,of
635 * @opflundef af
636 * @opflclear of,cf
637 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
638 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
639 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
640 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
641 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb, Gb — AF is architecturally undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
649
650
/**
652 * @opcode 0x09
653 * @opgroup og_gen_arith_bin
654 * @opflmodify cf,pf,af,zf,sf,of
655 * @opflundef af
656 * @opflclear of,cf
657 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
658 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
659 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
660 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
661 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
662 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
663 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev, Gv — AF is architecturally undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
671
672
673/**
674 * @opcode 0x0a
675 * @opgroup og_gen_arith_bin
676 * @opflmodify cf,pf,af,zf,sf,of
677 * @opflundef af
678 * @opflclear of,cf
679 * @opcopytests iemOp_or_Eb_Gb
680 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR Gb, Eb: register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
687
688
689/**
690 * @opcode 0x0b
691 * @opgroup og_gen_arith_bin
692 * @opflmodify cf,pf,af,zf,sf,of
693 * @opflundef af
694 * @opflclear of,cf
695 * @opcopytests iemOp_or_Ev_Gv
696 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv, Ev — uses IEMOP_BODY_BINARY_rv_rm defined elsewhere in this file. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
703
704
705/**
706 * @opcode 0x0c
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 * @opflundef af
710 * @opflclear of,cf
711 * @opcopytests iemOp_or_Eb_Gb
712 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, Ib: fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
719
720
721/**
722 * @opcode 0x0d
723 * @opgroup og_gen_arith_bin
724 * @opflmodify cf,pf,af,zf,sf,of
725 * @opflundef af
726 * @opflclear of,cf
727 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
728 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
729 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
730 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
731 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
732 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
733 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
734 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, Iz: operand-size dependent immediate form; 1 = modifies dst reg. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
741
742
743/**
744 * @opcode 0x0e
745 * @opgroup og_stack_sreg
746 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS — invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
753
754
755/**
756 * @opcode 0x0f
757 * @opmnemonic EscTwo0f
758 * @openc two0f
759 * @opdisenum OP_2B_ESC
760 * @ophints harmless
761 * @opgroup og_escapes
762 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        /* Dispatch via the two-byte map; each opcode has 4 slots indexed by
           the operand-size/repeat prefix (idxPrefix). */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
798
799/**
800 * @opcode 0x10
801 * @opgroup og_gen_arith_bin
802 * @opfltest cf
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
805 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
806 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
808 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
809 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb, Gb — add with carry; plain + LOCKed forms via the macro pair. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
816
817
818/**
819 * @opcode 0x11
820 * @opgroup og_gen_arith_bin
821 * @opfltest cf
822 * @opflmodify cf,pf,af,zf,sf,of
823 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
824 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
825 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
826 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
827 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
828 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev, Gv — add with carry; plain + LOCKed forms via the macro pair. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
835
836
837/**
838 * @opcode 0x12
839 * @opgroup og_gen_arith_bin
840 * @opfltest cf
841 * @opflmodify cf,pf,af,zf,sf,of
842 * @opcopytests iemOp_adc_Eb_Gb
843 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb, Eb: register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
849
850
851/**
852 * @opcode 0x13
853 * @opgroup og_gen_arith_bin
854 * @opfltest cf
855 * @opflmodify cf,pf,af,zf,sf,of
856 * @opcopytests iemOp_adc_Ev_Gv
857 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv, Ev — uses IEMOP_BODY_BINARY_rv_rm defined elsewhere in this file. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
863
864
865/**
866 * @opcode 0x14
867 * @opgroup og_gen_arith_bin
868 * @opfltest cf
869 * @opflmodify cf,pf,af,zf,sf,of
870 * @opcopytests iemOp_adc_Eb_Gb
871 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, Ib: fixed AL destination with byte immediate. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
877
878
879/**
880 * @opcode 0x15
881 * @opgroup og_gen_arith_bin
882 * @opfltest cf
883 * @opflmodify cf,pf,af,zf,sf,of
884 * @opcopytests iemOp_adc_Ev_Gv
885 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, Iz: operand-size dependent immediate form; 1 = modifies dst reg. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
891
892
893/**
894 * @opcode 0x16
895 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS — invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
902
903
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS — invalid in 64-bit mode; flagged interrupt-inhibiting so the
       following instruction executes before IRQ delivery. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
917
918
919/**
920 * @opcode 0x18
921 * @opgroup og_gen_arith_bin
922 * @opfltest cf
923 * @opflmodify cf,pf,af,zf,sf,of
924 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb, Gb — subtract with borrow; plain + LOCKed forms via the macro pair. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
931
932
933/**
934 * @opcode 0x19
935 * @opgroup og_gen_arith_bin
936 * @opfltest cf
937 * @opflmodify cf,pf,af,zf,sf,of
938 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev, Gv — subtract with borrow; plain + LOCKed forms via the macro pair. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
945
946
947/**
948 * @opcode 0x1a
949 * @opgroup og_gen_arith_bin
950 * @opfltest cf
951 * @opflmodify cf,pf,af,zf,sf,of
952 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb, Eb: register destination, so no LOCK variant. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
958
959
960/**
961 * @opcode 0x1b
962 * @opgroup og_gen_arith_bin
963 * @opfltest cf
964 * @opflmodify cf,pf,af,zf,sf,of
965 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv, Ev — uses IEMOP_BODY_BINARY_rv_rm defined elsewhere in this file. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
971
972
973/**
974 * @opcode 0x1c
975 * @opgroup og_gen_arith_bin
976 * @opfltest cf
977 * @opflmodify cf,pf,af,zf,sf,of
978 */
979FNIEMOP_DEF(iemOp_sbb_Al_Ib)
980{
981 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
982 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
983}
984
985
986/**
987 * @opcode 0x1d
988 * @opgroup og_gen_arith_bin
989 * @opfltest cf
990 * @opflmodify cf,pf,af,zf,sf,of
991 */
992FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
993{
994 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
995 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
996}
997
998
999/**
1000 * @opcode 0x1e
1001 * @opgroup og_stack_sreg
1002 */
1003FNIEMOP_DEF(iemOp_push_DS)
1004{
1005 IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
1006 IEMOP_HLP_NO_64BIT();
1007 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
1008}
1009
1010
1011/**
1012 * @opcode 0x1f
1013 * @opgroup og_stack_sreg
1014 */
1015FNIEMOP_DEF(iemOp_pop_DS)
1016{
1017 IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
1018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1019 IEMOP_HLP_NO_64BIT();
1020 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
1021}
1022
1023
1024/**
1025 * @opcode 0x20
1026 * @opgroup og_gen_arith_bin
1027 * @opflmodify cf,pf,af,zf,sf,of
1028 * @opflundef af
1029 * @opflclear of,cf
1030 */
1031FNIEMOP_DEF(iemOp_and_Eb_Gb)
1032{
1033 IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1034 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1035 IEMOP_BODY_BINARY_rm_r8( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
1036 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
1037}
1038
1039
1040/**
1041 * @opcode 0x21
1042 * @opgroup og_gen_arith_bin
1043 * @opflmodify cf,pf,af,zf,sf,of
1044 * @opflundef af
1045 * @opflclear of,cf
1046 */
1047FNIEMOP_DEF(iemOp_and_Ev_Gv)
1048{
1049 IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1050 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1051 IEMOP_BODY_BINARY_rm_rv( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
1052 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
1053}
1054
1055
1056/**
1057 * @opcode 0x22
1058 * @opgroup og_gen_arith_bin
1059 * @opflmodify cf,pf,af,zf,sf,of
1060 * @opflundef af
1061 * @opflclear of,cf
1062 */
1063FNIEMOP_DEF(iemOp_and_Gb_Eb)
1064{
1065 IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1066 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1067 IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
1068}
1069
1070
1071/**
1072 * @opcode 0x23
1073 * @opgroup og_gen_arith_bin
1074 * @opflmodify cf,pf,af,zf,sf,of
1075 * @opflundef af
1076 * @opflclear of,cf
1077 */
1078FNIEMOP_DEF(iemOp_and_Gv_Ev)
1079{
1080 IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1081 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1082 IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
1083}
1084
1085
1086/**
1087 * @opcode 0x24
1088 * @opgroup og_gen_arith_bin
1089 * @opflmodify cf,pf,af,zf,sf,of
1090 * @opflundef af
1091 * @opflclear of,cf
1092 */
1093FNIEMOP_DEF(iemOp_and_Al_Ib)
1094{
1095 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1096 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1097 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1098}
1099
1100
1101/**
1102 * @opcode 0x25
1103 * @opgroup og_gen_arith_bin
1104 * @opflmodify cf,pf,af,zf,sf,of
1105 * @opflundef af
1106 * @opflclear of,cf
1107 */
1108FNIEMOP_DEF(iemOp_and_eAX_Iz)
1109{
1110 IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1111 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1112 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
1113}
1114
1115
1116/**
1117 * @opcode 0x26
1118 * @opmnemonic SEG
1119 * @op1 ES
1120 * @opgroup og_prefix
1121 * @openc prefix
1122 * @opdisenum OP_SEG
1123 * @ophints harmless
1124 */
1125FNIEMOP_DEF(iemOp_seg_ES)
1126{
1127 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
1128 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
1129 pVCpu->iem.s.iEffSeg = X86_SREG_ES;
1130
1131 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1132 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1133}
1134
1135
1136/**
1137 * @opcode 0x27
1138 * @opfltest af,cf
1139 * @opflmodify cf,pf,af,zf,sf,of
1140 * @opflundef of
1141 */
1142FNIEMOP_DEF(iemOp_daa)
1143{
1144 IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
1145 IEMOP_HLP_NO_64BIT();
1146 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1147 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1148 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
1149}
1150
1151
1152/**
1153 * @opcode 0x28
1154 * @opgroup og_gen_arith_bin
1155 * @opflmodify cf,pf,af,zf,sf,of
1156 */
1157FNIEMOP_DEF(iemOp_sub_Eb_Gb)
1158{
1159 IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1160 IEMOP_BODY_BINARY_rm_r8( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
1161 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
1162}
1163
1164
1165/**
1166 * @opcode 0x29
1167 * @opgroup og_gen_arith_bin
1168 * @opflmodify cf,pf,af,zf,sf,of
1169 */
1170FNIEMOP_DEF(iemOp_sub_Ev_Gv)
1171{
1172 IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1173 IEMOP_BODY_BINARY_rm_rv( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
1174 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
1175}
1176
1177
1178/**
1179 * @opcode 0x2a
1180 * @opgroup og_gen_arith_bin
1181 * @opflmodify cf,pf,af,zf,sf,of
1182 */
1183FNIEMOP_DEF(iemOp_sub_Gb_Eb)
1184{
1185 IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1186 IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
1187}
1188
1189
1190/**
1191 * @opcode 0x2b
1192 * @opgroup og_gen_arith_bin
1193 * @opflmodify cf,pf,af,zf,sf,of
1194 */
1195FNIEMOP_DEF(iemOp_sub_Gv_Ev)
1196{
1197 IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1198 IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
1199}
1200
1201
1202/**
1203 * @opcode 0x2c
1204 * @opgroup og_gen_arith_bin
1205 * @opflmodify cf,pf,af,zf,sf,of
1206 */
1207FNIEMOP_DEF(iemOp_sub_Al_Ib)
1208{
1209 IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1210 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
1211}
1212
1213
1214/**
1215 * @opcode 0x2d
1216 * @opgroup og_gen_arith_bin
1217 * @opflmodify cf,pf,af,zf,sf,of
1218 */
1219FNIEMOP_DEF(iemOp_sub_eAX_Iz)
1220{
1221 IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
1222 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
1223}
1224
1225
1226/**
1227 * @opcode 0x2e
1228 * @opmnemonic SEG
1229 * @op1 CS
1230 * @opgroup og_prefix
1231 * @openc prefix
1232 * @opdisenum OP_SEG
1233 * @ophints harmless
1234 */
1235FNIEMOP_DEF(iemOp_seg_CS)
1236{
1237 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
1238 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
1239 pVCpu->iem.s.iEffSeg = X86_SREG_CS;
1240
1241 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1242 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1243}
1244
1245
1246/**
1247 * @opcode 0x2f
1248 * @opfltest af,cf
1249 * @opflmodify cf,pf,af,zf,sf,of
1250 * @opflundef of
1251 */
1252FNIEMOP_DEF(iemOp_das)
1253{
1254 IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
1255 IEMOP_HLP_NO_64BIT();
1256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1257 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1258 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
1259}
1260
1261
1262/**
1263 * @opcode 0x30
1264 * @opgroup og_gen_arith_bin
1265 * @opflmodify cf,pf,af,zf,sf,of
1266 * @opflundef af
1267 * @opflclear of,cf
1268 */
1269FNIEMOP_DEF(iemOp_xor_Eb_Gb)
1270{
1271 IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
1272 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1273 IEMOP_BODY_BINARY_rm_r8( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
1274 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
1275}
1276
1277
1278/**
1279 * @opcode 0x31
1280 * @opgroup og_gen_arith_bin
1281 * @opflmodify cf,pf,af,zf,sf,of
1282 * @opflundef af
1283 * @opflclear of,cf
1284 */
1285FNIEMOP_DEF(iemOp_xor_Ev_Gv)
1286{
1287 IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
1288 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1289 IEMOP_BODY_BINARY_rm_rv( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
1290 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
1291}
1292
1293
1294/**
1295 * @opcode 0x32
1296 * @opgroup og_gen_arith_bin
1297 * @opflmodify cf,pf,af,zf,sf,of
1298 * @opflundef af
1299 * @opflclear of,cf
1300 */
1301FNIEMOP_DEF(iemOp_xor_Gb_Eb)
1302{
1303 IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1304 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1305 IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
1306}
1307
1308
1309/**
1310 * @opcode 0x33
1311 * @opgroup og_gen_arith_bin
1312 * @opflmodify cf,pf,af,zf,sf,of
1313 * @opflundef af
1314 * @opflclear of,cf
1315 */
1316FNIEMOP_DEF(iemOp_xor_Gv_Ev)
1317{
1318 IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
1319 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1320 IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1321}
1322
1323
1324/**
1325 * @opcode 0x34
1326 * @opgroup og_gen_arith_bin
1327 * @opflmodify cf,pf,af,zf,sf,of
1328 * @opflundef af
1329 * @opflclear of,cf
1330 */
1331FNIEMOP_DEF(iemOp_xor_Al_Ib)
1332{
1333 IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1334 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1335 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
1336}
1337
1338
1339/**
1340 * @opcode 0x35
1341 * @opgroup og_gen_arith_bin
1342 * @opflmodify cf,pf,af,zf,sf,of
1343 * @opflundef af
1344 * @opflclear of,cf
1345 */
1346FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1347{
1348 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1349 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1350 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1351}
1352
1353
1354/**
1355 * @opcode 0x36
1356 * @opmnemonic SEG
1357 * @op1 SS
1358 * @opgroup og_prefix
1359 * @openc prefix
1360 * @opdisenum OP_SEG
1361 * @ophints harmless
1362 */
1363FNIEMOP_DEF(iemOp_seg_SS)
1364{
1365 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
1366 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
1367 pVCpu->iem.s.iEffSeg = X86_SREG_SS;
1368
1369 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1370 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1371}
1372
1373
1374/**
1375 * @opcode 0x37
1376 * @opfltest af,cf
1377 * @opflmodify cf,pf,af,zf,sf,of
1378 * @opflundef pf,zf,sf,of
1379 * @opgroup og_gen_arith_dec
1380 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1381 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1382 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1383 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1384 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1385 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1386 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1387 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1388 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1389 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1390 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1391 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1392 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1393 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1394 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1395 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1396 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1397 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1398 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1399 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1400 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1401 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1402 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1403 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1404 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1405 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1406 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1407 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1408 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1409 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1410 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1411 */
1412FNIEMOP_DEF(iemOp_aaa)
1413{
1414 IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1415 IEMOP_HLP_NO_64BIT();
1416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1417 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
1418
1419 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
1420}
1421
1422
1423/**
1424 * @opcode 0x38
1425 */
1426FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
1427{
1428 IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
1429 IEMOP_BODY_BINARY_rm_r8(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
1430 IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
1431}
1432
1433
1434/**
1435 * @opcode 0x39
1436 */
1437FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
1438{
1439 IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
1440 IEMOP_BODY_BINARY_rm_rv(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
1441 IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
1442}
1443
1444
1445/**
1446 * @opcode 0x3a
1447 */
1448FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
1449{
1450 IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
1451 IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
1452}
1453
1454
1455/**
1456 * @opcode 0x3b
1457 */
1458FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
1459{
1460 IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
1461 IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
1462}
1463
1464
1465/**
1466 * @opcode 0x3c
1467 */
1468FNIEMOP_DEF(iemOp_cmp_Al_Ib)
1469{
1470 IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
1471 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
1472}
1473
1474
1475/**
1476 * @opcode 0x3d
1477 */
1478FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
1479{
1480 IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
1481 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
1482}
1483
1484
1485/**
1486 * @opcode 0x3e
1487 */
1488FNIEMOP_DEF(iemOp_seg_DS)
1489{
1490 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
1491 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
1492 pVCpu->iem.s.iEffSeg = X86_SREG_DS;
1493
1494 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1495 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1496}
1497
1498
1499/**
1500 * @opcode 0x3f
1501 * @opfltest af,cf
1502 * @opflmodify cf,pf,af,zf,sf,of
1503 * @opflundef pf,zf,sf,of
1504 * @opgroup og_gen_arith_dec
1505 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1506 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1507 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1508 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1509 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1510 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1511 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1512 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1513 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1514 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1515 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1516 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1517 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1518 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1519 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1520 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1521 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1522 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1523 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1524 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1525 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1526 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1527 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1528 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1529 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1530 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1531 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1532 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1533 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1534 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1535 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1536 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1537 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1538 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1539 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1540 */
1541FNIEMOP_DEF(iemOp_aas)
1542{
1543 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1544 IEMOP_HLP_NO_64BIT();
1545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1546 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1547
1548 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1549}
1550
1551
1552/**
1553 * Common 'inc/dec register' helper.
1554 *
1555 * Not for 64-bit code, only for what became the rex prefixes.
1556 */
1557#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1558 switch (pVCpu->iem.s.enmEffOpSize) \
1559 { \
1560 case IEMMODE_16BIT: \
1561 IEM_MC_BEGIN(2, 0); \
1562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1563 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1564 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1565 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1566 IEM_MC_REF_EFLAGS(pEFlags); \
1567 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1568 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1569 IEM_MC_END(); \
1570 break; \
1571 \
1572 case IEMMODE_32BIT: \
1573 IEM_MC_BEGIN(2, 0); \
1574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1575 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1576 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1577 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1578 IEM_MC_REF_EFLAGS(pEFlags); \
1579 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1580 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
1581 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1582 IEM_MC_END(); \
1583 break; \
1584 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1585 } \
1586 (void)0
1587
1588/**
1589 * @opcode 0x40
1590 */
1591FNIEMOP_DEF(iemOp_inc_eAX)
1592{
1593 /*
1594 * This is a REX prefix in 64-bit mode.
1595 */
1596 if (IEM_IS_64BIT_CODE(pVCpu))
1597 {
1598 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
1599 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;
1600
1601 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1602 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1603 }
1604
1605 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
1606 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
1607}
1608
1609
1610/**
1611 * @opcode 0x41
1612 */
1613FNIEMOP_DEF(iemOp_inc_eCX)
1614{
1615 /*
1616 * This is a REX prefix in 64-bit mode.
1617 */
1618 if (IEM_IS_64BIT_CODE(pVCpu))
1619 {
1620 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
1621 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
1622 pVCpu->iem.s.uRexB = 1 << 3;
1623
1624 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1625 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1626 }
1627
1628 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
1629 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
1630}
1631
1632
1633/**
1634 * @opcode 0x42
1635 */
1636FNIEMOP_DEF(iemOp_inc_eDX)
1637{
1638 /*
1639 * This is a REX prefix in 64-bit mode.
1640 */
1641 if (IEM_IS_64BIT_CODE(pVCpu))
1642 {
1643 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
1644 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
1645 pVCpu->iem.s.uRexIndex = 1 << 3;
1646
1647 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1648 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1649 }
1650
1651 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
1652 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
1653}
1654
1655
1656
1657/**
1658 * @opcode 0x43
1659 */
1660FNIEMOP_DEF(iemOp_inc_eBX)
1661{
1662 /*
1663 * This is a REX prefix in 64-bit mode.
1664 */
1665 if (IEM_IS_64BIT_CODE(pVCpu))
1666 {
1667 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
1668 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1669 pVCpu->iem.s.uRexB = 1 << 3;
1670 pVCpu->iem.s.uRexIndex = 1 << 3;
1671
1672 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1673 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1674 }
1675
1676 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
1677 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
1678}
1679
1680
1681/**
1682 * @opcode 0x44
1683 */
1684FNIEMOP_DEF(iemOp_inc_eSP)
1685{
1686 /*
1687 * This is a REX prefix in 64-bit mode.
1688 */
1689 if (IEM_IS_64BIT_CODE(pVCpu))
1690 {
1691 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
1692 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
1693 pVCpu->iem.s.uRexReg = 1 << 3;
1694
1695 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1696 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1697 }
1698
1699 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
1700 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
1701}
1702
1703
1704/**
1705 * @opcode 0x45
1706 */
1707FNIEMOP_DEF(iemOp_inc_eBP)
1708{
1709 /*
1710 * This is a REX prefix in 64-bit mode.
1711 */
1712 if (IEM_IS_64BIT_CODE(pVCpu))
1713 {
1714 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
1715 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
1716 pVCpu->iem.s.uRexReg = 1 << 3;
1717 pVCpu->iem.s.uRexB = 1 << 3;
1718
1719 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1720 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1721 }
1722
1723 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
1724 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
1725}
1726
1727
1728/**
1729 * @opcode 0x46
1730 */
1731FNIEMOP_DEF(iemOp_inc_eSI)
1732{
1733 /*
1734 * This is a REX prefix in 64-bit mode.
1735 */
1736 if (IEM_IS_64BIT_CODE(pVCpu))
1737 {
1738 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1739 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1740 pVCpu->iem.s.uRexReg = 1 << 3;
1741 pVCpu->iem.s.uRexIndex = 1 << 3;
1742
1743 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1744 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1745 }
1746
1747 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1748 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
1749}
1750
1751
1752/**
1753 * @opcode 0x47
1754 */
1755FNIEMOP_DEF(iemOp_inc_eDI)
1756{
1757 /*
1758 * This is a REX prefix in 64-bit mode.
1759 */
1760 if (IEM_IS_64BIT_CODE(pVCpu))
1761 {
1762 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1763 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1764 pVCpu->iem.s.uRexReg = 1 << 3;
1765 pVCpu->iem.s.uRexB = 1 << 3;
1766 pVCpu->iem.s.uRexIndex = 1 << 3;
1767
1768 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1769 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1770 }
1771
1772 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1773 IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
1774}
1775
1776
1777/**
1778 * @opcode 0x48
1779 */
1780FNIEMOP_DEF(iemOp_dec_eAX)
1781{
1782 /*
1783 * This is a REX prefix in 64-bit mode.
1784 */
1785 if (IEM_IS_64BIT_CODE(pVCpu))
1786 {
1787 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1788 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1789 iemRecalEffOpSize(pVCpu);
1790
1791 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1792 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1793 }
1794
1795 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1796 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
1797}
1798
1799
1800/**
1801 * @opcode 0x49
1802 */
1803FNIEMOP_DEF(iemOp_dec_eCX)
1804{
1805 /*
1806 * This is a REX prefix in 64-bit mode.
1807 */
1808 if (IEM_IS_64BIT_CODE(pVCpu))
1809 {
1810 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1811 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1812 pVCpu->iem.s.uRexB = 1 << 3;
1813 iemRecalEffOpSize(pVCpu);
1814
1815 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1816 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1817 }
1818
1819 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1820 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
1821}
1822
1823
1824/**
1825 * @opcode 0x4a
1826 */
1827FNIEMOP_DEF(iemOp_dec_eDX)
1828{
1829 /*
1830 * This is a REX prefix in 64-bit mode.
1831 */
1832 if (IEM_IS_64BIT_CODE(pVCpu))
1833 {
1834 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1835 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1836 pVCpu->iem.s.uRexIndex = 1 << 3;
1837 iemRecalEffOpSize(pVCpu);
1838
1839 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1840 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1841 }
1842
1843 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1844 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
1845}
1846
1847
1848/**
1849 * @opcode 0x4b
1850 */
1851FNIEMOP_DEF(iemOp_dec_eBX)
1852{
1853 /*
1854 * This is a REX prefix in 64-bit mode.
1855 */
1856 if (IEM_IS_64BIT_CODE(pVCpu))
1857 {
1858 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1859 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1860 pVCpu->iem.s.uRexB = 1 << 3;
1861 pVCpu->iem.s.uRexIndex = 1 << 3;
1862 iemRecalEffOpSize(pVCpu);
1863
1864 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1865 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1866 }
1867
1868 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1869 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
1870}
1871
1872
1873/**
1874 * @opcode 0x4c
1875 */
1876FNIEMOP_DEF(iemOp_dec_eSP)
1877{
1878 /*
1879 * This is a REX prefix in 64-bit mode.
1880 */
1881 if (IEM_IS_64BIT_CODE(pVCpu))
1882 {
1883 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1884 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1885 pVCpu->iem.s.uRexReg = 1 << 3;
1886 iemRecalEffOpSize(pVCpu);
1887
1888 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1889 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1890 }
1891
1892 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1893 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
1894}
1895
1896
1897/**
1898 * @opcode 0x4d
1899 */
1900FNIEMOP_DEF(iemOp_dec_eBP)
1901{
1902 /*
1903 * This is a REX prefix in 64-bit mode.
1904 */
1905 if (IEM_IS_64BIT_CODE(pVCpu))
1906 {
1907 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1908 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1909 pVCpu->iem.s.uRexReg = 1 << 3;
1910 pVCpu->iem.s.uRexB = 1 << 3;
1911 iemRecalEffOpSize(pVCpu);
1912
1913 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1914 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1915 }
1916
1917 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1918 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
1919}
1920
1921
1922/**
1923 * @opcode 0x4e
1924 */
1925FNIEMOP_DEF(iemOp_dec_eSI)
1926{
1927 /*
1928 * This is a REX prefix in 64-bit mode.
1929 */
1930 if (IEM_IS_64BIT_CODE(pVCpu))
1931 {
1932 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1933 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1934 pVCpu->iem.s.uRexReg = 1 << 3;
1935 pVCpu->iem.s.uRexIndex = 1 << 3;
1936 iemRecalEffOpSize(pVCpu);
1937
1938 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1939 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1940 }
1941
1942 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1943 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
1944}
1945
1946
1947/**
1948 * @opcode 0x4f
1949 */
1950FNIEMOP_DEF(iemOp_dec_eDI)
1951{
1952 /*
1953 * This is a REX prefix in 64-bit mode.
1954 */
1955 if (IEM_IS_64BIT_CODE(pVCpu))
1956 {
1957 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1958 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1959 pVCpu->iem.s.uRexReg = 1 << 3;
1960 pVCpu->iem.s.uRexB = 1 << 3;
1961 pVCpu->iem.s.uRexIndex = 1 << 3;
1962 iemRecalEffOpSize(pVCpu);
1963
1964 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1965 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1966 }
1967
1968 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1969 IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
1970}
1971
1972
1973/**
1974 * Common 'push register' helper.
1975 */
1976FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
1977{
1978 if (IEM_IS_64BIT_CODE(pVCpu))
1979 {
1980 iReg |= pVCpu->iem.s.uRexB;
1981 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1982 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1983 }
1984
1985 switch (pVCpu->iem.s.enmEffOpSize)
1986 {
1987 case IEMMODE_16BIT:
1988 IEM_MC_BEGIN(0, 1);
1989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1990 IEM_MC_LOCAL(uint16_t, u16Value);
1991 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
1992 IEM_MC_PUSH_U16(u16Value);
1993 IEM_MC_ADVANCE_RIP_AND_FINISH();
1994 IEM_MC_END();
1995 break;
1996
1997 case IEMMODE_32BIT:
1998 IEM_MC_BEGIN(0, 1);
1999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2000 IEM_MC_LOCAL(uint32_t, u32Value);
2001 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2002 IEM_MC_PUSH_U32(u32Value);
2003 IEM_MC_ADVANCE_RIP_AND_FINISH();
2004 IEM_MC_END();
2005 break;
2006
2007 case IEMMODE_64BIT:
2008 IEM_MC_BEGIN(0, 1);
2009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2010 IEM_MC_LOCAL(uint64_t, u64Value);
2011 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2012 IEM_MC_PUSH_U64(u64Value);
2013 IEM_MC_ADVANCE_RIP_AND_FINISH();
2014 IEM_MC_END();
2015 break;
2016
2017 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2018 }
2019}
2020
2021
2022/**
2023 * @opcode 0x50
2024 */
2025FNIEMOP_DEF(iemOp_push_eAX)
2026{
2027 IEMOP_MNEMONIC(push_rAX, "push rAX");
2028 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2029}
2030
2031
2032/**
2033 * @opcode 0x51
2034 */
2035FNIEMOP_DEF(iemOp_push_eCX)
2036{
2037 IEMOP_MNEMONIC(push_rCX, "push rCX");
2038 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2039}
2040
2041
2042/**
2043 * @opcode 0x52
2044 */
2045FNIEMOP_DEF(iemOp_push_eDX)
2046{
2047 IEMOP_MNEMONIC(push_rDX, "push rDX");
2048 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2049}
2050
2051
2052/**
2053 * @opcode 0x53
 *
 * PUSH rBX/eBX/BX - defers to the common push-register helper.
2054 */
2055FNIEMOP_DEF(iemOp_push_eBX)
2056{
2057 IEMOP_MNEMONIC(push_rBX, "push rBX");
2058 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2059}
2060
2061
2062/**
2063 * @opcode 0x54
 *
 * PUSH rSP/eSP/SP.  The 8086 target CPU is special cased: it pushes the
 * value SP has *after* the implicit decrement (i.e. SP - 2), which is what
 * the first MC block below emulates before falling back to the common
 * helper for all other CPUs.
2064 */
2065FNIEMOP_DEF(iemOp_push_eSP)
2066{
2067 IEMOP_MNEMONIC(push_rSP, "push rSP");
2068 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2069 {
2070 IEM_MC_BEGIN(0, 1);
2071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2072 IEM_MC_LOCAL(uint16_t, u16Value);
2073 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2074 IEM_MC_SUB_LOCAL_U16(u16Value, 2); /* 8086 pushes the already decremented SP value. */
2075 IEM_MC_PUSH_U16(u16Value);
2076 IEM_MC_ADVANCE_RIP_AND_FINISH();
2077 IEM_MC_END();
2078 }
2079 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2080}
2081
2082
2083/**
2084 * @opcode 0x55
 *
 * PUSH rBP/eBP/BP - defers to the common push-register helper.
2085 */
2086FNIEMOP_DEF(iemOp_push_eBP)
2087{
2088 IEMOP_MNEMONIC(push_rBP, "push rBP");
2089 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2090}
2091
2092
2093/**
2094 * @opcode 0x56
 *
 * PUSH rSI/eSI/SI - defers to the common push-register helper.
2095 */
2096FNIEMOP_DEF(iemOp_push_eSI)
2097{
2098 IEMOP_MNEMONIC(push_rSI, "push rSI");
2099 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2100}
2101
2102
2103/**
2104 * @opcode 0x57
 *
 * PUSH rDI/eDI/DI - defers to the common push-register helper.
2105 */
2106FNIEMOP_DEF(iemOp_push_eDI)
2107{
2108 IEMOP_MNEMONIC(push_rDI, "push rDI");
2109 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2110}
2111
2112
2113/**
2114 * Common 'pop register' helper.
 *
 * Pops the top of stack into the general purpose register given by @a iReg,
 * using the current effective operand size.  In 64-bit mode the register
 * index is extended with REX.B and the default operand size is forced to
 * 64-bit (66h prefix selects 16-bit).  Note that 'pop rSP' without REX.B is
 * special cased by the 0x5c decoder and does not go through this helper.
 *
 * @param   iReg    The register index from the opcode byte (X86_GREG_XXX);
 *                  ORed with REX.B in 64-bit mode.
2115 */
2116FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2117{
2118 if (IEM_IS_64BIT_CODE(pVCpu))
2119 {
2120 iReg |= pVCpu->iem.s.uRexB;
2121 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2122 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2123 }
2124
 /* Pop directly into the destination register via a reference. */
2125 switch (pVCpu->iem.s.enmEffOpSize)
2126 {
2127 case IEMMODE_16BIT:
2128 IEM_MC_BEGIN(0, 1);
2129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2130 IEM_MC_LOCAL(uint16_t *, pu16Dst);
2131 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
2132 IEM_MC_POP_U16(pu16Dst);
2133 IEM_MC_ADVANCE_RIP_AND_FINISH();
2134 IEM_MC_END();
2135 break;
2136
2137 case IEMMODE_32BIT:
2138 IEM_MC_BEGIN(0, 1);
2139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2140 IEM_MC_LOCAL(uint32_t *, pu32Dst);
2141 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
2142 IEM_MC_POP_U32(pu32Dst);
2143 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
2144 IEM_MC_ADVANCE_RIP_AND_FINISH();
2145 IEM_MC_END();
2146 break;
2147
2148 case IEMMODE_64BIT:
2149 IEM_MC_BEGIN(0, 1);
2150 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2151 IEM_MC_LOCAL(uint64_t *, pu64Dst);
2152 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
2153 IEM_MC_POP_U64(pu64Dst);
2154 IEM_MC_ADVANCE_RIP_AND_FINISH();
2155 IEM_MC_END();
2156 break;
2157
2158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2159 }
2160}
2161
2162
2163/**
2164 * @opcode 0x58
 *
 * POP rAX/eAX/AX - defers to the common pop-register helper.
2165 */
2166FNIEMOP_DEF(iemOp_pop_eAX)
2167{
2168 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2169 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2170}
2171
2172
2173/**
2174 * @opcode 0x59
 *
 * POP rCX/eCX/CX - defers to the common pop-register helper.
2175 */
2176FNIEMOP_DEF(iemOp_pop_eCX)
2177{
2178 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2179 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2180}
2181
2182
2183/**
2184 * @opcode 0x5a
 *
 * POP rDX/eDX/DX - defers to the common pop-register helper.
2185 */
2186FNIEMOP_DEF(iemOp_pop_eDX)
2187{
2188 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2189 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2190}
2191
2192
2193/**
2194 * @opcode 0x5b
 *
 * POP rBX/eBX/BX - defers to the common pop-register helper.
2195 */
2196FNIEMOP_DEF(iemOp_pop_eBX)
2197{
2198 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2199 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2200}
2201
2202
2203/**
2204 * @opcode 0x5c
 *
 * POP rSP/eSP/SP.  This cannot use the common pop helper because the popped
 * value must be stored into the same register that the pop increments: the
 * value is therefore popped into a local first (incrementing rSP) and only
 * then stored to rSP, overwriting the increment.  With REX.B the destination
 * is r12 and the common helper works fine.
2205 */
2206FNIEMOP_DEF(iemOp_pop_eSP)
2207{
2208 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2209 if (IEM_IS_64BIT_CODE(pVCpu))
2210 {
2211 if (pVCpu->iem.s.uRexB)
2212 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2213 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2214 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2215 }
2216
2217 /** @todo add testcase for this instruction. */
2218 switch (pVCpu->iem.s.enmEffOpSize)
2219 {
2220 case IEMMODE_16BIT:
2221 IEM_MC_BEGIN(0, 1);
2222 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2223 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2224 IEM_MC_LOCAL(uint16_t, u16Dst);
2225 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
2226 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
2227 IEM_MC_ADVANCE_RIP_AND_FINISH();
2228 IEM_MC_END();
2229 break;
2230
2231 case IEMMODE_32BIT:
2232 IEM_MC_BEGIN(0, 1);
2233 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2234 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2235 IEM_MC_LOCAL(uint32_t, u32Dst);
2236 IEM_MC_POP_U32(&u32Dst);
2237 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
2238 IEM_MC_ADVANCE_RIP_AND_FINISH();
2239 IEM_MC_END();
2240 break;
2241
2242 case IEMMODE_64BIT:
2243 IEM_MC_BEGIN(0, 1);
2244 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
2245 DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
2246 IEM_MC_LOCAL(uint64_t, u64Dst);
2247 IEM_MC_POP_U64(&u64Dst);
2248 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
2249 IEM_MC_ADVANCE_RIP_AND_FINISH();
2250 IEM_MC_END();
2251 break;
2252
2253 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2254 }
2255}
2256
2257
2258/**
2259 * @opcode 0x5d
 *
 * POP rBP/eBP/BP - defers to the common pop-register helper.
2260 */
2261FNIEMOP_DEF(iemOp_pop_eBP)
2262{
2263 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2264 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2265}
2266
2267
2268/**
2269 * @opcode 0x5e
 *
 * POP rSI/eSI/SI - defers to the common pop-register helper.
2270 */
2271FNIEMOP_DEF(iemOp_pop_eSI)
2272{
2273 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2274 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2275}
2276
2277
2278/**
2279 * @opcode 0x5f
 *
 * POP rDI/eDI/DI - defers to the common pop-register helper.
2280 */
2281FNIEMOP_DEF(iemOp_pop_eDI)
2282{
2283 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2284 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2285}
2286
2287
2288/**
2289 * @opcode 0x60
 *
 * PUSHA/PUSHAD - pushes all eight general registers; 186+ only, invalid in
 * 64-bit mode.  Deferred to a C implementation for either operand size.
2290 */
2291FNIEMOP_DEF(iemOp_pusha)
2292{
2293 IEMOP_MNEMONIC(pusha, "pusha");
2294 IEMOP_HLP_MIN_186();
2295 IEMOP_HLP_NO_64BIT();
2296 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2297 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
2298 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2299 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
2300}
2301
2302
2303/**
2304 * @opcode 0x61
 *
 * POPA/POPAD in 16/32-bit code (deferred to C implementations).  In 64-bit
 * code this opcode is the MVEX prefix byte, which is not supported and
 * raises \#UD.
2305 */
2306FNIEMOP_DEF(iemOp_popa__mvex)
2307{
2308 if (!IEM_IS_64BIT_CODE(pVCpu))
2309 {
2310 IEMOP_MNEMONIC(popa, "popa");
2311 IEMOP_HLP_MIN_186();
2312 IEMOP_HLP_NO_64BIT();
2313 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2314 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_16);
2315 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2316 IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_popa_32);
2317 }
2318 IEMOP_MNEMONIC(mvex, "mvex");
2319 Log(("mvex prefix is not supported!\n"));
2320 IEMOP_RAISE_INVALID_OPCODE_RET();
2321}
2322
2323
2324/**
2325 * @opcode 0x62
2326 * @opmnemonic bound
2327 * @op1 Gv_RO
2328 * @op2 Ma
2329 * @opmincpu 80186
2330 * @ophints harmless x86_invalid_64
2331 * @optest op1=0 op2=0 ->
2332 * @optest op1=1 op2=0 -> value.xcpt=5
2333 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2334 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2335 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2336 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2337 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2338 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2339 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2340 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2341 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2342 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2343 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2344 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2345 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2346 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2347 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2348 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2349 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2350 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2351 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2352 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2353 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2354 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2355 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2356 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2357 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2358 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2359 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2360 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2361 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2362 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2363 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2364 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2365 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2366 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2367 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2368 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2369 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2370 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2371 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2372 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2373 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2374 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2375 */
2376FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
2377{
2378 /* The BOUND instruction is invalid 64-bit mode. In legacy and
2379 compatibility mode it is invalid with MOD=3.
2380
2381 In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
2382 both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
2383 given as R and X without an exact description, so we assume it builds on
2384 the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
2385 like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
2386 uint8_t bRm;
2387 if (!IEM_IS_64BIT_CODE(pVCpu))
2388 {
2389 IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2390 IEMOP_HLP_MIN_186();
2391 IEM_OPCODE_GET_NEXT_U8(&bRm);
2392 if (IEM_IS_MODRM_MEM_MODE(bRm))
2393 {
2394 /** @todo testcase: check that there are two memory accesses involved. Check
2395 * whether they're both read before the \#BR triggers. */
2396 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2397 {
2398 IEM_MC_BEGIN(3, 1);
2399 IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2400 IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
2401 IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
2402 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2403
2404 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2406
 /* Read the index register and both bound words, then let the C impl do the range check. */
2407 IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
2408 IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2409 IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
2410
2411 IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
2412 IEM_MC_END();
2413 }
2414 else /* 32-bit operands */
2415 {
2416 IEM_MC_BEGIN(3, 1);
2417 IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2418 IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
2419 IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
2420 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
2421
2422 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2423 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2424
 /* Read the index register and both bound dwords, then let the C impl do the range check. */
2425 IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
2426 IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2427 IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
2428
2429 IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
2430 IEM_MC_END();
2431 }
2432 }
2433
2434 /*
2435 * @opdone
2436 */
2437 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2438 {
2439 /* Note that there is no need for the CPU to fetch further bytes
2440 here because MODRM.MOD == 3. */
2441 Log(("evex not supported by the guest CPU!\n"));
2442 IEMOP_RAISE_INVALID_OPCODE_RET();
2443 }
2444 }
2445 else
2446 {
2447 /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
2448 * does modr/m read, whereas AMD probably doesn't... */
2449 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2450 {
2451 Log(("evex not supported by the guest CPU!\n"));
2452 return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
2453 }
2454 IEM_OPCODE_GET_NEXT_U8(&bRm);
2455 }
2456
 /* If we get here the byte is an EVEX prefix (MOD=3 or 64-bit mode); consume
 the remaining two payload bytes. EVEX decoding is not implemented yet. */
2457 IEMOP_MNEMONIC(evex, "evex");
2458 uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
2459 uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
2460 Log(("evex prefix is not implemented!\n"));
2461 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2462}
2463
2464
2465/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjusts the RPL of the destination selector to at least that
 * of the source; 286+ protected mode only (raises \#UD in real/V86 mode).
 * The actual RPL/ZF logic lives in the iemAImpl_arpl assembly helper. */
2466FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
2467{
2468 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
2469 IEMOP_HLP_MIN_286();
2470 IEMOP_HLP_NO_REAL_OR_V86_MODE();
2471 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2472
2473 if (IEM_IS_MODRM_REG_MODE(bRm))
2474 {
2475 /* Register */
2476 IEM_MC_BEGIN(3, 0);
2477 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2478 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2479 IEM_MC_ARG(uint16_t, u16Src, 1);
2480 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2481
2482 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2483 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
2484 IEM_MC_REF_EFLAGS(pEFlags);
2485 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2486
2487 IEM_MC_ADVANCE_RIP_AND_FINISH();
2488 IEM_MC_END();
2489 }
2490 else
2491 {
 /* Memory: map the destination word read/write, apply the helper, then
 commit both the word and the resulting EFLAGS. */
2492 /* Memory */
2493 IEM_MC_BEGIN(3, 2);
2494 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2495 IEM_MC_ARG(uint16_t, u16Src, 1);
2496 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
2497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2498
2499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2500 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2501 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2502 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2503 IEM_MC_FETCH_EFLAGS(EFlags);
2504 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2505
2506 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
2507 IEM_MC_COMMIT_EFLAGS(EFlags);
2508 IEM_MC_ADVANCE_RIP_AND_FINISH();
2509 IEM_MC_END();
2510 }
2511}
2512
2513
2514/**
2515 * @opcode 0x63
 *
 * MOVSXD Gv,Ev (64-bit mode): sign-extend a 32-bit register or memory
 * operand into a 64-bit register.  Only the REX.W form is implemented; the
 * non-REX.W form asserts (see @note below).
2516 *
2517 * @note This is a weird one. It works like a regular move instruction if
2518 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2519 * @todo This definitely needs a testcase to verify the odd cases. */
2520FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2521{
2522 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
2523
2524 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2525 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2526
2527 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2528 {
2529 if (IEM_IS_MODRM_REG_MODE(bRm))
2530 {
2531 /*
2532 * Register to register.
2533 */
2534 IEM_MC_BEGIN(0, 1);
2535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2536 IEM_MC_LOCAL(uint64_t, u64Value);
2537 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2538 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2539 IEM_MC_ADVANCE_RIP_AND_FINISH();
2540 IEM_MC_END();
2541 }
2542 else
2543 {
2544 /*
2545 * We're loading a register from memory.
2546 */
2547 IEM_MC_BEGIN(0, 2);
2548 IEM_MC_LOCAL(uint64_t, u64Value);
2549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2552 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2553 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2554 IEM_MC_ADVANCE_RIP_AND_FINISH();
2555 IEM_MC_END();
2556 }
2557 }
2558 else
 /* Non-REX.W movsxd (32/16-bit move semantics) is not implemented yet. */
2559 AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2560}
2561
2562
2563/**
2564 * @opcode 0x64
 * @opmnemonic segfs
2565 * @opmincpu 80386
2566 * @opgroup og_prefixes
 *
 * FS segment-override prefix: records the override and recursively decodes
 * the next opcode byte through the one-byte dispatch table.
2567 */
2568FNIEMOP_DEF(iemOp_seg_FS)
2569{
2570 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2571 IEMOP_HLP_MIN_386();
2572
2573 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2574 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2575
2576 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2577 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2578}
2580
2581
2582/**
2583 * @opcode 0x65
2584 * @opmnemonic seggs
2585 * @opmincpu 80386
2586 * @opgroup og_prefixes
2587 */
2588FNIEMOP_DEF(iemOp_seg_GS)
2589{
2590 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2591 IEMOP_HLP_MIN_386();
2592
2593 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2594 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2595
2596 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2597 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2598}
2599
2600
2601/**
2602 * @opcode 0x66
 * @opmnemonic opsize
2603 * @openc prefix
2604 * @opmincpu 80386
2605 * @ophints harmless
2606 * @opgroup og_prefixes
 *
 * Operand-size override (66h) prefix: flags the prefix, recalculates the
 * effective operand size, and recursively decodes the next opcode byte.
2607 */
2608FNIEMOP_DEF(iemOp_op_size)
2609{
2610 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2611 IEMOP_HLP_MIN_386();
2612
2613 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2614 iemRecalEffOpSize(pVCpu);
2615
2616 /* For the 4 entry opcode tables, the operand prefix doesn't count
2617 when REPZ or REPNZ are present. */
2618 if (pVCpu->iem.s.idxPrefix == 0)
2619 pVCpu->iem.s.idxPrefix = 1;
2620
2621 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2622 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2623}
2625
2626
2627/**
2628 * @opcode 0x67
 * @opmnemonic addrsize
2629 * @openc prefix
2630 * @opmincpu 80386
2631 * @ophints harmless
2632 * @opgroup og_prefixes
 *
 * Address-size override (67h) prefix: toggles the effective address mode
 * relative to the default one and recursively decodes the next opcode byte.
2633 */
2634FNIEMOP_DEF(iemOp_addr_size)
2635{
2636 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2637 IEMOP_HLP_MIN_386();
2638
2639 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2640 switch (pVCpu->iem.s.enmDefAddrMode)
2641 {
2642 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2643 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2644 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2645 default: AssertFailed();
2646 }
2647
2648 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2649 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2650}
2652
2653
2654/**
2655 * @opcode 0x68
 *
 * PUSH Iz - pushes a word/dword immediate (sign-extended dword in 64-bit
 * mode); 186+ only.  The immediate is decoded at the effective operand size
 * before the MC block starts.
2656 */
2657FNIEMOP_DEF(iemOp_push_Iz)
2658{
2659 IEMOP_MNEMONIC(push_Iz, "push Iz");
2660 IEMOP_HLP_MIN_186();
2661 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2662 switch (pVCpu->iem.s.enmEffOpSize)
2663 {
2664 case IEMMODE_16BIT:
2665 {
2666 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2667 IEM_MC_BEGIN(0,0);
2668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2669 IEM_MC_PUSH_U16(u16Imm);
2670 IEM_MC_ADVANCE_RIP_AND_FINISH();
2671 IEM_MC_END();
2672 break;
2673 }
2674
2675 case IEMMODE_32BIT:
2676 {
2677 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2678 IEM_MC_BEGIN(0,0);
2679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2680 IEM_MC_PUSH_U32(u32Imm);
2681 IEM_MC_ADVANCE_RIP_AND_FINISH();
2682 IEM_MC_END();
2683 break;
2684 }
2685
2686 case IEMMODE_64BIT:
2687 {
 /* The 64-bit form takes a 32-bit immediate sign-extended to 64 bits. */
2688 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2689 IEM_MC_BEGIN(0,0);
2690 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2691 IEM_MC_PUSH_U64(u64Imm);
2692 IEM_MC_ADVANCE_RIP_AND_FINISH();
2693 IEM_MC_END();
2694 break;
2695 }
2696
2697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2698 }
2699}
2700
2701
2702/**
2703 * @opcode 0x69
 *
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a word/dword immediate
 * (dword sign-extended to 64-bit); 186+ only.  The multiply itself is done
 * by the EFLAGS-behaviour-selected iemAImpl_imul_two_uXX helpers; the
 * destination register is written from a local after the helper returns.
2704 */
2705FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2706{
2707 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2708 IEMOP_HLP_MIN_186();
2709 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2710 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2711
2712 switch (pVCpu->iem.s.enmEffOpSize)
2713 {
2714 case IEMMODE_16BIT:
2715 {
2716 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2717 if (IEM_IS_MODRM_REG_MODE(bRm))
2718 {
2719 /* register operand */
2720 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2721 IEM_MC_BEGIN(3, 1);
2722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2723 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2724 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2725 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2726 IEM_MC_LOCAL(uint16_t, u16Tmp);
2727
2728 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2729 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2730 IEM_MC_REF_EFLAGS(pEFlags);
2731 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2732 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2733
2734 IEM_MC_ADVANCE_RIP_AND_FINISH();
2735 IEM_MC_END();
2736 }
2737 else
2738 {
 /* memory operand - note the immediate is decoded after the effective
 address (it follows the modr/m bytes in the instruction stream). */
2739 /* memory operand */
2740 IEM_MC_BEGIN(3, 2);
2741 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2742 IEM_MC_ARG(uint16_t, u16Src, 1);
2743 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2744 IEM_MC_LOCAL(uint16_t, u16Tmp);
2745 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2746
2747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2748 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2749 IEM_MC_ASSIGN(u16Src, u16Imm);
2750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2751 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2752 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2753 IEM_MC_REF_EFLAGS(pEFlags);
2754 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2755 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2756
2757 IEM_MC_ADVANCE_RIP_AND_FINISH();
2758 IEM_MC_END();
2759 }
2760 break;
2761 }
2762
2763 case IEMMODE_32BIT:
2764 {
2765 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2766 if (IEM_IS_MODRM_REG_MODE(bRm))
2767 {
2768 /* register operand */
2769 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2770 IEM_MC_BEGIN(3, 1);
2771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2772 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2773 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2774 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2775 IEM_MC_LOCAL(uint32_t, u32Tmp);
2776
2777 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2778 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2779 IEM_MC_REF_EFLAGS(pEFlags);
2780 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2781 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2782
2783 IEM_MC_ADVANCE_RIP_AND_FINISH();
2784 IEM_MC_END();
2785 }
2786 else
2787 {
2788 /* memory operand */
2789 IEM_MC_BEGIN(3, 2);
2790 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2791 IEM_MC_ARG(uint32_t, u32Src, 1);
2792 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2793 IEM_MC_LOCAL(uint32_t, u32Tmp);
2794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2795
2796 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2797 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2798 IEM_MC_ASSIGN(u32Src, u32Imm);
2799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2800 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2801 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2802 IEM_MC_REF_EFLAGS(pEFlags);
2803 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2804 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2805
2806 IEM_MC_ADVANCE_RIP_AND_FINISH();
2807 IEM_MC_END();
2808 }
2809 break;
2810 }
2811
2812 case IEMMODE_64BIT:
2813 {
2814 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
2815 if (IEM_IS_MODRM_REG_MODE(bRm))
2816 {
2817 /* register operand */
2818 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2819 IEM_MC_BEGIN(3, 1);
2820 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2821 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2822 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2823 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2824 IEM_MC_LOCAL(uint64_t, u64Tmp);
2825
2826 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2827 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2828 IEM_MC_REF_EFLAGS(pEFlags);
2829 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
2830 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2831
2832 IEM_MC_ADVANCE_RIP_AND_FINISH();
2833 IEM_MC_END();
2834 }
2835 else
2836 {
2837 /* memory operand */
2838 IEM_MC_BEGIN(3, 2);
2839 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2840 IEM_MC_ARG(uint64_t, u64Src, 1);
2841 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2842 IEM_MC_LOCAL(uint64_t, u64Tmp);
2843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2844
2845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2846 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
2847 IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
2848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2849 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2850 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2851 IEM_MC_REF_EFLAGS(pEFlags);
2852 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
2853 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
2854
2855 IEM_MC_ADVANCE_RIP_AND_FINISH();
2856 IEM_MC_END();
2857 }
2858 break;
2859 }
2860
2861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2862 }
2863}
2864
2865
2866/**
2867 * @opcode 0x6a
 *
 * PUSH Ib - pushes a sign-extended byte immediate at the effective operand
 * size; 186+ only.  The immediate is decoded as a signed byte so the
 * widening pushes below sign-extend it implicitly.
2868 */
2869FNIEMOP_DEF(iemOp_push_Ib)
2870{
2871 IEMOP_MNEMONIC(push_Ib, "push Ib");
2872 IEMOP_HLP_MIN_186();
2873 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2874 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2875
2876 switch (pVCpu->iem.s.enmEffOpSize)
2877 {
2878 case IEMMODE_16BIT:
2879 IEM_MC_BEGIN(0,0);
2880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2881 IEM_MC_PUSH_U16(i8Imm);
2882 IEM_MC_ADVANCE_RIP_AND_FINISH();
2883 IEM_MC_END();
2884 break;
2885 case IEMMODE_32BIT:
2886 IEM_MC_BEGIN(0,0);
2887 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2888 IEM_MC_PUSH_U32(i8Imm);
2889 IEM_MC_ADVANCE_RIP_AND_FINISH();
2890 IEM_MC_END();
2891 break;
2892 case IEMMODE_64BIT:
2893 IEM_MC_BEGIN(0,0);
2894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2895 IEM_MC_PUSH_U64(i8Imm);
2896 IEM_MC_ADVANCE_RIP_AND_FINISH();
2897 IEM_MC_END();
2898 break;
2899 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2900 }
2901}
2902
2903
2904/**
2905 * @opcode 0x6b
 *
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate; 186+ only.  Structured identically to the 0x69 (Iz) form, only
 * the immediate decoding differs.
2906 */
2907FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2908{
2909 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
2910 IEMOP_HLP_MIN_186();
2911 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2912 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2913
2914 switch (pVCpu->iem.s.enmEffOpSize)
2915 {
2916 case IEMMODE_16BIT:
2917 {
2918 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2919 if (IEM_IS_MODRM_REG_MODE(bRm))
2920 {
2921 /* register operand */
2922 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2923 IEM_MC_BEGIN(3, 1);
2924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2925 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2926 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2927 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2928 IEM_MC_LOCAL(uint16_t, u16Tmp);
2929
2930 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2931 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2932 IEM_MC_REF_EFLAGS(pEFlags);
2933 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2934 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2935
2936 IEM_MC_ADVANCE_RIP_AND_FINISH();
2937 IEM_MC_END();
2938 }
2939 else
2940 {
 /* memory operand - the immediate byte follows the modr/m bytes, hence
 the effective address is calculated first. */
2941 /* memory operand */
2942 IEM_MC_BEGIN(3, 2);
2943 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2944 IEM_MC_ARG(uint16_t, u16Src, 1);
2945 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2946 IEM_MC_LOCAL(uint16_t, u16Tmp);
2947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2948
2949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2950 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2951 IEM_MC_ASSIGN(u16Src, u16Imm);
2952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2953 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2954 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2955 IEM_MC_REF_EFLAGS(pEFlags);
2956 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2957 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2958
2959 IEM_MC_ADVANCE_RIP_AND_FINISH();
2960 IEM_MC_END();
2961 }
2962 break;
2963 }
2964
2965 case IEMMODE_32BIT:
2966 {
2967 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2968 if (IEM_IS_MODRM_REG_MODE(bRm))
2969 {
2970 /* register operand */
2971 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2972 IEM_MC_BEGIN(3, 1);
2973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2974 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2975 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2976 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2977 IEM_MC_LOCAL(uint32_t, u32Tmp);
2978
2979 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2980 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2981 IEM_MC_REF_EFLAGS(pEFlags);
2982 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2983 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2984
2985 IEM_MC_ADVANCE_RIP_AND_FINISH();
2986 IEM_MC_END();
2987 }
2988 else
2989 {
2990 /* memory operand */
2991 IEM_MC_BEGIN(3, 2);
2992 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2993 IEM_MC_ARG(uint32_t, u32Src, 1);
2994 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2995 IEM_MC_LOCAL(uint32_t, u32Tmp);
2996 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2997
2998 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2999 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3000 IEM_MC_ASSIGN(u32Src, u32Imm);
3001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3002 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3003 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3004 IEM_MC_REF_EFLAGS(pEFlags);
3005 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3006 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3007
3008 IEM_MC_ADVANCE_RIP_AND_FINISH();
3009 IEM_MC_END();
3010 }
3011 break;
3012 }
3013
3014 case IEMMODE_64BIT:
3015 {
3016 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3017 if (IEM_IS_MODRM_REG_MODE(bRm))
3018 {
3019 /* register operand */
3020 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3021 IEM_MC_BEGIN(3, 1);
3022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3023 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3024 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
3025 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3026 IEM_MC_LOCAL(uint64_t, u64Tmp);
3027
3028 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3029 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3030 IEM_MC_REF_EFLAGS(pEFlags);
3031 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3032 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3033
3034 IEM_MC_ADVANCE_RIP_AND_FINISH();
3035 IEM_MC_END();
3036 }
3037 else
3038 {
3039 /* memory operand */
3040 IEM_MC_BEGIN(3, 2);
3041 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3042 IEM_MC_ARG(uint64_t, u64Src, 1);
3043 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3044 IEM_MC_LOCAL(uint64_t, u64Tmp);
3045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3046
3047 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3048 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
3049 IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
3050 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3051 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3052 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3053 IEM_MC_REF_EFLAGS(pEFlags);
3054 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3055 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3056
3057 IEM_MC_ADVANCE_RIP_AND_FINISH();
3058 IEM_MC_END();
3059 }
3060 break;
3061 }
3062
3063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3064 }
3065}
3066
3067
3068/**
3069 * @opcode 0x6c
 *
 * 'insb Yb,DX' - input byte string from the I/O port in DX to ES:[e/rDI].
 * Requires an 80186 or later (IEMOP_HLP_MIN_186).  The work is deferred to a
 * C implementation selected by effective address size and by whether a
 * REP/REPNZ prefix is present; the CIMPL may cause a VM-exit
 * (IEM_CIMPL_F_VMEXIT).
3070 */
3071FNIEMOP_DEF(iemOp_insb_Yb_DX)
3072{
3073 IEMOP_HLP_MIN_186();
3074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Both F3 (REP) and F2 (REPNZ) prefixes select the repeating worker. */
3075 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3076 {
3077 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3078 switch (pVCpu->iem.s.enmEffAddrMode)
3079 {
3080 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr16, false);
3081 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr32, false);
3082 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr64, false);
3083 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3084 }
3085 }
3086 else
3087 {
 /* No repeat prefix: single-iteration INS worker. */
3088 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3089 switch (pVCpu->iem.s.enmEffAddrMode)
3090 {
3091 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr16, false);
3092 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr32, false);
3093 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr64, false);
3094 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3095 }
3096 }
3097}
3098
3099
3100/**
3101 * @opcode 0x6d
 *
 * 'insw/insd Yv,DX' - input word/dword string from the I/O port in DX to
 * ES:[e/rDI].  Requires 80186+.  Dispatches to a C implementation chosen by
 * effective operand size (16-bit vs 32-bit; 64-bit operand size uses the
 * 32-bit workers per the fall-through cases below), effective address size,
 * and REP/REPNZ prefix presence.
3102 */
3103FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3104{
3105 IEMOP_HLP_MIN_186();
3106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3107 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3108 {
3109 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3110 switch (pVCpu->iem.s.enmEffOpSize)
3111 {
3112 case IEMMODE_16BIT:
3113 switch (pVCpu->iem.s.enmEffAddrMode)
3114 {
3115 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr16, false);
3116 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr32, false);
3117 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr64, false);
3118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3119 }
3120 break;
 /* 64-bit operand size is handled by the 32-bit (op32) workers. */
3121 case IEMMODE_64BIT:
3122 case IEMMODE_32BIT:
3123 switch (pVCpu->iem.s.enmEffAddrMode)
3124 {
3125 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr16, false);
3126 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr32, false);
3127 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr64, false);
3128 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3129 }
3130 break;
 /* Note: this expands to the 'default:' label of the outer op-size switch. */
3131 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3132 }
3133 }
3134 else
3135 {
3136 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3137 switch (pVCpu->iem.s.enmEffOpSize)
3138 {
3139 case IEMMODE_16BIT:
3140 switch (pVCpu->iem.s.enmEffAddrMode)
3141 {
3142 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr16, false);
3143 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr32, false);
3144 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr64, false);
3145 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3146 }
3147 break;
3148 case IEMMODE_64BIT:
3149 case IEMMODE_32BIT:
3150 switch (pVCpu->iem.s.enmEffAddrMode)
3151 {
3152 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr16, false);
3153 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr32, false);
3154 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr64, false);
3155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3156 }
3157 break;
3158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3159 }
3160 }
3161}
3162
3163
3164/**
3165 * @opcode 0x6e
 *
 * 'outsb DX,Xb' - output byte string from DS:[e/rSI] (segment overridable,
 * see the iEffSeg argument) to the I/O port in DX.  Requires 80186+.
 * Deferred to a C implementation selected by effective address size and
 * REP/REPNZ prefix presence; the CIMPL may cause a VM-exit.
3166 */
3167FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3168{
3169 IEMOP_HLP_MIN_186();
3170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Both F3 (REP) and F2 (REPNZ) prefixes select the repeating worker. */
3171 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3172 {
3173 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3174 switch (pVCpu->iem.s.enmEffAddrMode)
3175 {
3176 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3177 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3178 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3179 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3180 }
3181 }
3182 else
3183 {
3184 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3185 switch (pVCpu->iem.s.enmEffAddrMode)
3186 {
3187 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3188 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3189 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3191 }
3192 }
3193}
3194
3195
3196/**
3197 * @opcode 0x6f
 *
 * 'outsw/outsd DX,Xv' - output word/dword string from DS:[e/rSI] (segment
 * overridable via iEffSeg) to the I/O port in DX.  Requires 80186+.
 * Dispatches on effective operand size (64-bit falls through to the 32-bit
 * workers), effective address size, and REP/REPNZ prefix presence.
3198 */
3199FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3200{
3201 IEMOP_HLP_MIN_186();
3202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3203 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3204 {
3205 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3206 switch (pVCpu->iem.s.enmEffOpSize)
3207 {
3208 case IEMMODE_16BIT:
3209 switch (pVCpu->iem.s.enmEffAddrMode)
3210 {
3211 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3212 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3213 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3215 }
3216 break;
 /* 64-bit operand size is handled by the 32-bit (op32) workers. */
3217 case IEMMODE_64BIT:
3218 case IEMMODE_32BIT:
3219 switch (pVCpu->iem.s.enmEffAddrMode)
3220 {
3221 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3222 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3223 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3225 }
3226 break;
 /* Note: this expands to the 'default:' label of the outer op-size switch. */
3227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3228 }
3229 }
3230 else
3231 {
3232 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3233 switch (pVCpu->iem.s.enmEffOpSize)
3234 {
3235 case IEMMODE_16BIT:
3236 switch (pVCpu->iem.s.enmEffAddrMode)
3237 {
3238 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3239 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3240 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3241 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3242 }
3243 break;
3244 case IEMMODE_64BIT:
3245 case IEMMODE_32BIT:
3246 switch (pVCpu->iem.s.enmEffAddrMode)
3247 {
3248 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3249 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3250 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3252 }
3253 break;
3254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3255 }
3256 }
3257}
3258
3259
3260/**
3261 * @opcode 0x70
 * 'jo Jb' - jump short (signed 8-bit displacement) if the overflow flag
 * (OF) is set; otherwise just advance RIP past the instruction.
 * In 64-bit mode the operand size defaults to 64-bit and Intel CPUs ignore
 * the operand-size prefix (see the HLP macro below).
3262 */
3263FNIEMOP_DEF(iemOp_jo_Jb)
3264{
3265 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3266 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3267 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3268
3269 IEM_MC_BEGIN(0, 0);
3270 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3271 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3272 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3273 } IEM_MC_ELSE() {
3274 IEM_MC_ADVANCE_RIP_AND_FINISH();
3275 } IEM_MC_ENDIF();
3276 IEM_MC_END();
3277}
3278
3279
3280/**
3281 * @opcode 0x71
 * 'jno Jb' - jump short if the overflow flag (OF) is clear.
 * The MC IF tests OF *set*, so the taken-branch path sits in the ELSE arm.
3282 */
3283FNIEMOP_DEF(iemOp_jno_Jb)
3284{
3285 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3286 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3287 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3288
3289 IEM_MC_BEGIN(0, 0);
3290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3291 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3292 IEM_MC_ADVANCE_RIP_AND_FINISH();
3293 } IEM_MC_ELSE() {
3294 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3295 } IEM_MC_ENDIF();
3296 IEM_MC_END();
3297}
3298
3299/**
3300 * @opcode 0x72
 * 'jc/jb/jnae Jb' - jump short if the carry flag (CF) is set.
3301 */
3302FNIEMOP_DEF(iemOp_jc_Jb)
3303{
3304 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3305 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3306 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3307
3308 IEM_MC_BEGIN(0, 0);
3309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3310 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3311 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3312 } IEM_MC_ELSE() {
3313 IEM_MC_ADVANCE_RIP_AND_FINISH();
3314 } IEM_MC_ENDIF();
3315 IEM_MC_END();
3316}
3317
3318
3319/**
3320 * @opcode 0x73
 * 'jnc/jnb/jae Jb' - jump short if the carry flag (CF) is clear.
 * The MC IF tests CF *set*, so the taken-branch path sits in the ELSE arm.
3321 */
3322FNIEMOP_DEF(iemOp_jnc_Jb)
3323{
3324 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3325 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3326 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3327
3328 IEM_MC_BEGIN(0, 0);
3329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3330 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3331 IEM_MC_ADVANCE_RIP_AND_FINISH();
3332 } IEM_MC_ELSE() {
3333 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3334 } IEM_MC_ENDIF();
3335 IEM_MC_END();
3336}
3337
3338
3339/**
3340 * @opcode 0x74
 * 'je/jz Jb' - jump short if the zero flag (ZF) is set.
3341 */
3342FNIEMOP_DEF(iemOp_je_Jb)
3343{
3344 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3345 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3346 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3347
3348 IEM_MC_BEGIN(0, 0);
3349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3350 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3351 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3352 } IEM_MC_ELSE() {
3353 IEM_MC_ADVANCE_RIP_AND_FINISH();
3354 } IEM_MC_ENDIF();
3355 IEM_MC_END();
3356}
3357
3358
3359/**
3360 * @opcode 0x75
 * 'jne/jnz Jb' - jump short if the zero flag (ZF) is clear.
 * The MC IF tests ZF *set*, so the taken-branch path sits in the ELSE arm.
3361 */
3362FNIEMOP_DEF(iemOp_jne_Jb)
3363{
3364 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3365 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3366 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3367
3368 IEM_MC_BEGIN(0, 0);
3369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3370 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3371 IEM_MC_ADVANCE_RIP_AND_FINISH();
3372 } IEM_MC_ELSE() {
3373 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3374 } IEM_MC_ENDIF();
3375 IEM_MC_END();
3376}
3377
3378
3379/**
3380 * @opcode 0x76
 * 'jbe/jna Jb' - jump short if below or equal (unsigned): CF=1 or ZF=1.
3381 */
3382FNIEMOP_DEF(iemOp_jbe_Jb)
3383{
3384 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3385 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3386 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3387
3388 IEM_MC_BEGIN(0, 0);
3389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3390 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3391 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3392 } IEM_MC_ELSE() {
3393 IEM_MC_ADVANCE_RIP_AND_FINISH();
3394 } IEM_MC_ENDIF();
3395 IEM_MC_END();
3396}
3397
3398
3399/**
3400 * @opcode 0x77
 * 'ja/jnbe Jb' - jump short if above (unsigned): CF=0 and ZF=0.
 * The MC IF tests CF|ZF *set*, so the taken-branch path is the ELSE arm.
3401 */
3402FNIEMOP_DEF(iemOp_jnbe_Jb)
3403{
3404 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3405 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3406 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3407
3408 IEM_MC_BEGIN(0, 0);
3409 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3410 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3411 IEM_MC_ADVANCE_RIP_AND_FINISH();
3412 } IEM_MC_ELSE() {
3413 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3414 } IEM_MC_ENDIF();
3415 IEM_MC_END();
3416}
3417
3418
3419/**
3420 * @opcode 0x78
 * 'js Jb' - jump short if the sign flag (SF) is set.
3421 */
3422FNIEMOP_DEF(iemOp_js_Jb)
3423{
3424 IEMOP_MNEMONIC(js_Jb, "js Jb");
3425 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3426 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3427
3428 IEM_MC_BEGIN(0, 0);
3429 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3430 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3431 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3432 } IEM_MC_ELSE() {
3433 IEM_MC_ADVANCE_RIP_AND_FINISH();
3434 } IEM_MC_ENDIF();
3435 IEM_MC_END();
3436}
3437
3438
3439/**
3440 * @opcode 0x79
 * 'jns Jb' - jump short if the sign flag (SF) is clear.
 * The MC IF tests SF *set*, so the taken-branch path sits in the ELSE arm.
3441 */
3442FNIEMOP_DEF(iemOp_jns_Jb)
3443{
3444 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3445 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3446 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3447
3448 IEM_MC_BEGIN(0, 0);
3449 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3450 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3451 IEM_MC_ADVANCE_RIP_AND_FINISH();
3452 } IEM_MC_ELSE() {
3453 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3454 } IEM_MC_ENDIF();
3455 IEM_MC_END();
3456}
3457
3458
3459/**
3460 * @opcode 0x7a
 * 'jp/jpe Jb' - jump short if the parity flag (PF) is set.
3461 */
3462FNIEMOP_DEF(iemOp_jp_Jb)
3463{
3464 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3465 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3466 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3467
3468 IEM_MC_BEGIN(0, 0);
3469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3470 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3471 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3472 } IEM_MC_ELSE() {
3473 IEM_MC_ADVANCE_RIP_AND_FINISH();
3474 } IEM_MC_ENDIF();
3475 IEM_MC_END();
3476}
3477
3478
3479/**
3480 * @opcode 0x7b
 * 'jnp/jpo Jb' - jump short if the parity flag (PF) is clear.
 * The MC IF tests PF *set*, so the taken-branch path sits in the ELSE arm.
3481 */
3482FNIEMOP_DEF(iemOp_jnp_Jb)
3483{
3484 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3485 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3486 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3487
3488 IEM_MC_BEGIN(0, 0);
3489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3490 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3491 IEM_MC_ADVANCE_RIP_AND_FINISH();
3492 } IEM_MC_ELSE() {
3493 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3494 } IEM_MC_ENDIF();
3495 IEM_MC_END();
3496}
3497
3498
3499/**
3500 * @opcode 0x7c
 * 'jl/jnge Jb' - jump short if less (signed): SF != OF.
3501 */
3502FNIEMOP_DEF(iemOp_jl_Jb)
3503{
3504 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3505 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3506 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3507
3508 IEM_MC_BEGIN(0, 0);
3509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3510 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3511 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3512 } IEM_MC_ELSE() {
3513 IEM_MC_ADVANCE_RIP_AND_FINISH();
3514 } IEM_MC_ENDIF();
3515 IEM_MC_END();
3516}
3517
3518
3519/**
3520 * @opcode 0x7d
 * 'jnl/jge Jb' - jump short if greater or equal (signed): SF == OF.
 * The MC IF tests SF != OF, so the taken-branch path sits in the ELSE arm.
3521 */
3522FNIEMOP_DEF(iemOp_jnl_Jb)
3523{
3524 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3525 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3526 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3527
3528 IEM_MC_BEGIN(0, 0);
3529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3530 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3531 IEM_MC_ADVANCE_RIP_AND_FINISH();
3532 } IEM_MC_ELSE() {
3533 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3534 } IEM_MC_ENDIF();
3535 IEM_MC_END();
3536}
3537
3538
3539/**
3540 * @opcode 0x7e
 * 'jle/jng Jb' - jump short if less or equal (signed): ZF=1 or SF != OF.
3541 */
3542FNIEMOP_DEF(iemOp_jle_Jb)
3543{
3544 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3545 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3546 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3547
3548 IEM_MC_BEGIN(0, 0);
3549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3550 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3551 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3552 } IEM_MC_ELSE() {
3553 IEM_MC_ADVANCE_RIP_AND_FINISH();
3554 } IEM_MC_ENDIF();
3555 IEM_MC_END();
3556}
3557
3558
3559/**
3560 * @opcode 0x7f
 * 'jg/jnle Jb' - jump short if greater (signed): ZF=0 and SF == OF.
 * The MC IF tests the jle condition, so the taken path is the ELSE arm.
3561 */
3562FNIEMOP_DEF(iemOp_jnle_Jb)
3563{
3564 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3565 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3566 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3567
3568 IEM_MC_BEGIN(0, 0);
3569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3570 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3571 IEM_MC_ADVANCE_RIP_AND_FINISH();
3572 } IEM_MC_ELSE() {
3573 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3574 } IEM_MC_ENDIF();
3575 IEM_MC_END();
3576}
3577
3578
3579/**
3580 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3581 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register-target path and the non-LOCK memory-target path, then
 * stops inside an open 'else { ' scope for the LOCK-prefixed memory case.
 * It must therefore be followed by exactly one of
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() or IEMOP_BODY_BINARY_Eb_Ib_LOCKED()
 * to close the braces.
 *
 * @param a_fnNormalU8  The non-locked 8-bit arithmetic worker.
 * @param a_fRW         Memory mapping mode: IEM_ACCESS_DATA_RW for
 *                      read-modify-write ops, IEM_ACCESS_DATA_R for cmp.
3582 */
3583#define IEMOP_BODY_BINARY_Eb_Ib(a_fnNormalU8, a_fRW) \
3584 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3585 { \
3586 /* register target */ \
3587 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3588 IEM_MC_BEGIN(3, 0); \
3589 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3590 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3591 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3592 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3593 \
3594 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3595 IEM_MC_REF_EFLAGS(pEFlags); \
3596 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3597 \
3598 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3599 IEM_MC_END(); \
3600 } \
3601 else \
3602 { \
3603 /* memory target */ \
3604 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3605 { \
3606 IEM_MC_BEGIN(3, 2); \
3607 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3608 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3610 \
3611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3612 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3613 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3614 IEMOP_HLP_DONE_DECODING(); \
3615 \
3616 IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3617 IEM_MC_FETCH_EFLAGS(EFlags); \
3618 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3619 \
3620 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
3621 IEM_MC_COMMIT_EFLAGS(EFlags); \
3622 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3623 IEM_MC_END(); \
3624 } \
3625 else \
3626 { \
3627 (void)0
3628
/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib for instructions that do not allow a LOCK
 * prefix (cmp): the LOCK-prefixed memory path raises an invalid-lock-prefix
 * exception instead of executing.
 */
3629#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
3630 IEMOP_HLP_DONE_DECODING(); \
3631 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
3632 } \
3633 } \
3634 (void)0
3635
/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib with the LOCK-prefixed memory path: maps
 * the destination read-write and invokes the locked (atomic) 8-bit worker
 * @a a_fnLockedU8.
 */
3636#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
3637 IEM_MC_BEGIN(3, 2); \
3638 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3639 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3641 \
3642 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3643 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3644 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3645 IEMOP_HLP_DONE_DECODING(); \
3646 \
3647 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3648 IEM_MC_FETCH_EFLAGS(EFlags); \
3649 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
3650 \
3651 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
3652 IEM_MC_COMMIT_EFLAGS(EFlags); \
3653 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3654 IEM_MC_END(); \
3655 } \
3656 } \
3657 (void)0
3658
3659
3660/**
3661 * @opmaps grp1_80,grp1_83
3662 * @opcode /0
 * 'add Eb,Ib' - add byte immediate to 8-bit register/memory operand.
 * First macro emits register + non-LOCK memory paths; second emits the
 * LOCK-prefixed memory path using the locked worker.
3663 */
3664FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
3665{
3666 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
3667 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
3668 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
3669}
3670
3671
3672/**
3673 * @opmaps grp1_80,grp1_83
3674 * @opcode /1
 * 'or Eb,Ib' - bitwise OR of byte immediate into 8-bit register/memory
 * operand; locked variant used for LOCK-prefixed memory targets.
3675 */
3676FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
3677{
3678 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
3679 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
3680 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
3681}
3682
3683
3684/**
3685 * @opmaps grp1_80,grp1_83
3686 * @opcode /2
 * 'adc Eb,Ib' - add-with-carry byte immediate to 8-bit register/memory
 * operand; locked variant used for LOCK-prefixed memory targets.
3687 */
3688FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
3689{
3690 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
3691 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
3692 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
3693}
3694
3695
3696/**
3697 * @opmaps grp1_80,grp1_83
3698 * @opcode /3
 * 'sbb Eb,Ib' - subtract-with-borrow byte immediate from 8-bit
 * register/memory operand; locked variant for LOCK-prefixed memory targets.
3699 */
3700FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
3701{
3702 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
3703 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
3704 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
3705}
3706
3707
3708/**
3709 * @opmaps grp1_80,grp1_83
3710 * @opcode /4
 * 'and Eb,Ib' - bitwise AND of byte immediate into 8-bit register/memory
 * operand; locked variant used for LOCK-prefixed memory targets.
3711 */
3712FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
3713{
3714 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
3715 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
3716 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
3717}
3718
3719
3720/**
3721 * @opmaps grp1_80,grp1_83
3722 * @opcode /5
 * 'sub Eb,Ib' - subtract byte immediate from 8-bit register/memory
 * operand; locked variant used for LOCK-prefixed memory targets.
3723 */
3724FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
3725{
3726 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
3727 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
3728 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
3729}
3730
3731
3732/**
3733 * @opmaps grp1_80,grp1_83
3734 * @opcode /6
 * 'xor Eb,Ib' - bitwise XOR of byte immediate into 8-bit register/memory
 * operand; locked variant used for LOCK-prefixed memory targets.
3735 */
3736FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
3737{
3738 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
3739 IEMOP_BODY_BINARY_Eb_Ib( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
3740 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
3741}
3742
3743
3744/**
3745 * @opmaps grp1_80,grp1_83
3746 * @opcode /7
 * 'cmp Eb,Ib' - compare 8-bit operand with byte immediate (flags only, so
 * the memory operand is mapped read-only and a LOCK prefix is invalid -
 * hence the NO_LOCK closer instead of a locked worker).
3747 */
3748FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
3749{
3750 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
3751 IEMOP_BODY_BINARY_Eb_Ib(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
3752 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
3753}
3754
3755
3756/**
3757 * @opcode 0x80
 * Group 1, byte operand with byte immediate: dispatches on the ModR/M
 * reg field to the add/or/adc/sbb/and/sub/xor/cmp sub-handlers above.
3758 */
3759FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
3760{
3761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3762 switch (IEM_GET_MODRM_REG_8(bRm))
3763 {
3764 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
3765 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
3766 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
3767 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
3768 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
3769 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
3770 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
3771 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
 /* The reg field is 3 bits, so all eight values are covered above. */
3772 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3773 }
3774}
3775
3776
3777/**
3778 * Body for a group 1 binary operator.
 *
 * Handles 'op Ev,Iz' (opcode 0x81): word/dword/qword destination with a
 * word/dword immediate (sign-extended dword for 64-bit operand size).
 * Emits the register-target paths and the non-LOCK memory-target paths for
 * all three operand sizes, then stops inside an open 'else { ' scope for
 * the LOCK-prefixed memory case; must be closed by either
 * IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() or IEMOP_BODY_BINARY_Ev_Iz_LOCKED().
 *
 * @param a_fnNormalU16  Non-locked 16-bit worker.
 * @param a_fnNormalU32  Non-locked 32-bit worker.
 * @param a_fnNormalU64  Non-locked 64-bit worker.
 * @param a_fRW          IEM_ACCESS_DATA_RW for read-modify-write ops,
 *                       IEM_ACCESS_DATA_R for cmp; also gates the
 *                       high-dword clearing on 32-bit register writes.
3779 */
3780#define IEMOP_BODY_BINARY_Ev_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
3781 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3782 { \
3783 /* register target */ \
3784 switch (pVCpu->iem.s.enmEffOpSize) \
3785 { \
3786 case IEMMODE_16BIT: \
3787 { \
3788 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
3789 IEM_MC_BEGIN(3, 0); \
3790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3791 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
3792 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
3793 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3794 \
3795 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3796 IEM_MC_REF_EFLAGS(pEFlags); \
3797 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
3798 \
3799 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3800 IEM_MC_END(); \
3801 break; \
3802 } \
3803 \
3804 case IEMMODE_32BIT: \
3805 { \
3806 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
3807 IEM_MC_BEGIN(3, 0); \
3808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3809 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
3810 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
3811 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3812 \
3813 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3814 IEM_MC_REF_EFLAGS(pEFlags); \
3815 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
 /* Writing a 32-bit GPR zeroes the high dword; skipped for read-only cmp. */ \
3816 if (a_fRW == IEM_ACCESS_DATA_RW) \
3817 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
3818 \
3819 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3820 IEM_MC_END(); \
3821 break; \
3822 } \
3823 \
3824 case IEMMODE_64BIT: \
3825 { \
3826 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
3827 IEM_MC_BEGIN(3, 0); \
3828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3829 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
3830 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
3831 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3832 \
3833 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3834 IEM_MC_REF_EFLAGS(pEFlags); \
3835 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
3836 \
3837 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3838 IEM_MC_END(); \
3839 break; \
3840 } \
3841 \
3842 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3843 } \
3844 } \
3845 else \
3846 { \
3847 /* memory target */ \
3848 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3849 { \
3850 switch (pVCpu->iem.s.enmEffOpSize) \
3851 { \
3852 case IEMMODE_16BIT: \
3853 { \
3854 IEM_MC_BEGIN(3, 2); \
3855 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
3856 IEM_MC_ARG(uint16_t, u16Src, 1); \
3857 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3859 \
3860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
3861 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
3862 IEM_MC_ASSIGN(u16Src, u16Imm); \
3863 IEMOP_HLP_DONE_DECODING(); \
3864 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3865 IEM_MC_FETCH_EFLAGS(EFlags); \
3866 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
3867 \
3868 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
3869 IEM_MC_COMMIT_EFLAGS(EFlags); \
3870 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3871 IEM_MC_END(); \
3872 break; \
3873 } \
3874 \
3875 case IEMMODE_32BIT: \
3876 { \
3877 IEM_MC_BEGIN(3, 2); \
3878 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
3879 IEM_MC_ARG(uint32_t, u32Src, 1); \
3880 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3881 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3882 \
3883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
3884 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
3885 IEM_MC_ASSIGN(u32Src, u32Imm); \
3886 IEMOP_HLP_DONE_DECODING(); \
3887 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3888 IEM_MC_FETCH_EFLAGS(EFlags); \
3889 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
3890 \
3891 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
3892 IEM_MC_COMMIT_EFLAGS(EFlags); \
3893 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3894 IEM_MC_END(); \
3895 break; \
3896 } \
3897 \
3898 case IEMMODE_64BIT: \
3899 { \
3900 IEM_MC_BEGIN(3, 2); \
3901 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
3902 IEM_MC_ARG(uint64_t, u64Src, 1); \
3903 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3905 \
3906 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
3907 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
3908 IEMOP_HLP_DONE_DECODING(); \
3909 IEM_MC_ASSIGN(u64Src, u64Imm); \
3910 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3911 IEM_MC_FETCH_EFLAGS(EFlags); \
3912 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
3913 \
3914 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
3915 IEM_MC_COMMIT_EFLAGS(EFlags); \
3916 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3917 IEM_MC_END(); \
3918 break; \
3919 } \
3920 \
3921 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3922 } \
3923 } \
3924 else \
3925 { \
3926 (void)0
3927
/**
 * Closes IEMOP_BODY_BINARY_Ev_Iz for instructions that do not allow a LOCK
 * prefix (cmp): the LOCK-prefixed memory path raises an invalid-lock-prefix
 * exception instead of executing.
 */
3928#define IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() \
3929 IEMOP_HLP_DONE_DECODING(); \
3930 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
3931 } \
3932 } \
3933 (void)0
3934
/**
 * Closes IEMOP_BODY_BINARY_Ev_Iz with the LOCK-prefixed memory paths for
 * all three operand sizes, mapping the destination read-write and invoking
 * the locked (atomic) workers @a a_fnLockedU16/U32/U64.
 */
3935#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
3936 switch (pVCpu->iem.s.enmEffOpSize) \
3937 { \
3938 case IEMMODE_16BIT: \
3939 { \
3940 IEM_MC_BEGIN(3, 2); \
3941 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
3942 IEM_MC_ARG(uint16_t, u16Src, 1); \
3943 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3945 \
3946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
3947 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
3948 IEM_MC_ASSIGN(u16Src, u16Imm); \
3949 IEMOP_HLP_DONE_DECODING(); \
3950 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3951 IEM_MC_FETCH_EFLAGS(EFlags); \
3952 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
3953 \
3954 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
3955 IEM_MC_COMMIT_EFLAGS(EFlags); \
3956 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3957 IEM_MC_END(); \
3958 break; \
3959 } \
3960 \
3961 case IEMMODE_32BIT: \
3962 { \
3963 IEM_MC_BEGIN(3, 2); \
3964 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
3965 IEM_MC_ARG(uint32_t, u32Src, 1); \
3966 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3968 \
3969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
3970 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
3971 IEM_MC_ASSIGN(u32Src, u32Imm); \
3972 IEMOP_HLP_DONE_DECODING(); \
3973 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3974 IEM_MC_FETCH_EFLAGS(EFlags); \
3975 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
3976 \
3977 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
3978 IEM_MC_COMMIT_EFLAGS(EFlags); \
3979 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3980 IEM_MC_END(); \
3981 break; \
3982 } \
3983 \
3984 case IEMMODE_64BIT: \
3985 { \
3986 IEM_MC_BEGIN(3, 2); \
3987 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
3988 IEM_MC_ARG(uint64_t, u64Src, 1); \
3989 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3991 \
3992 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
3993 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
3994 IEMOP_HLP_DONE_DECODING(); \
3995 IEM_MC_ASSIGN(u64Src, u64Imm); \
3996 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
3997 IEM_MC_FETCH_EFLAGS(EFlags); \
3998 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
3999 \
4000 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
4001 IEM_MC_COMMIT_EFLAGS(EFlags); \
4002 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4003 IEM_MC_END(); \
4004 break; \
4005 } \
4006 \
4007 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4008 } \
4009 } \
4010 } \
4011 (void)0
4012
4013
4014/**
4015 * @opmaps grp1_81
4016 * @opcode /0
 * 'add Ev,Iz' - add word/dword immediate (sign-extended for 64-bit) to a
 * 16/32/64-bit register/memory operand; locked variants for LOCK+memory.
4017 */
4018FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4019{
4020 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4021 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
4022 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4023}
4024
4025
4026/**
4027 * @opmaps grp1_81
4028 * @opcode /1
 * 'or Ev,Iz' - bitwise OR of word/dword immediate into a 16/32/64-bit
 * register/memory operand; locked variants for LOCK+memory.
4029 */
4030FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4031{
4032 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4033 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
4034 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4035}
4036
4037
4038/**
4039 * @opmaps grp1_81
4040 * @opcode /2
 * 'adc Ev,Iz' - add-with-carry word/dword immediate to a 16/32/64-bit
 * register/memory operand; locked variants for LOCK+memory.
4041 */
4042FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4043{
4044 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4045 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
4046 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4047}
4048
4049
4050/**
4051 * @opmaps grp1_81
4052 * @opcode /3
 * 'sbb Ev,Iz' - subtract-with-borrow word/dword immediate from a
 * 16/32/64-bit register/memory operand; locked variants for LOCK+memory.
4053 */
4054FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4055{
4056 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4057 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
4058 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4059}
4060
4061
/**
 * @opmaps grp1_81
 * @opcode /4
 * @note AND Ev,Iz - operand sized immediate.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    /* Unlocked bodies, then the LOCK-prefixed memory bodies (closes scopes). */
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4072
4073
/**
 * @opmaps grp1_81
 * @opcode /5
 * @note SUB Ev,Iz - operand sized immediate.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    /* Unlocked bodies, then the LOCK-prefixed memory bodies (closes scopes). */
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4084
4085
/**
 * @opmaps grp1_81
 * @opcode /6
 * @note XOR Ev,Iz - operand sized immediate.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    /* Unlocked bodies, then the LOCK-prefixed memory bodies (closes scopes). */
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4096
4097
/**
 * @opmaps grp1_81
 * @opcode /7
 * @note CMP Ev,Iz - only reads the destination (IEM_ACCESS_DATA_R) and
 *       updates EFLAGS; a LOCK prefix is invalid here, hence the NO_LOCK tail.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    /* Unlocked bodies; the NO_LOCK tail rejects LOCK and closes the scopes. */
    IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK();
}
4108
4109
/**
 * @opcode 0x81
 *
 * Group 1 dispatcher: selects the worker on the ModRM.reg field and forwards
 * the already-fetched ModRM byte.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        /* reg is a 3-bit field, so all eight values are covered above. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4129
4130
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 *
 * Undocumented alias of opcode 0x80 (group 1 Eb,Ib); only valid outside
 * 64-bit mode (IEMOP_HLP_NO_64BIT rejects it there).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4141
4142
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.
 *
 * The byte immediate is sign-extended ((int8_t)u8Imm) to the effective
 * operand size before being passed to the a_fnNormalU16/32/64 worker.
 *
 * Note! This macro intentionally leaves an if/else and two scopes open; it
 *       must be followed by IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() or
 *       IEMOP_BODY_BINARY_Ev_Ib_LOCKED() which close them again.
 */
#define IEMOP_BODY_BINARY_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* Writing a 32-bit GPR zeroes the high half; skip for read-only ops (CMP). */ \
                if ((a_fRW) != IEM_ACCESS_DATA_R) \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    /* The '1' tells the EA calculation one immediate byte follows. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
4296
/**
 * Tail for IEMOP_BODY_BINARY_Ev_Ib when the instruction doesn't take a LOCK
 * prefix: raises the invalid-lock-prefix exception and closes the scopes
 * left open by the body macro.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4303
/**
 * Tail for IEMOP_BODY_BINARY_Ev_Ib handling the LOCK-prefixed memory forms:
 * same decode pattern as the unlocked memory path but calling the
 * a_fnLockedU16/32/64 workers, always mapping read-write.  Closes the scopes
 * left open by the body macro.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    /* The '1' tells the EA calculation one immediate byte follows. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4381
/**
 * @opmaps grp1_83
 * @opcode /0
 * @note ADD Ev,Ib - the byte immediate is sign-extended to the operand size.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* Unlocked bodies, then the LOCK-prefixed memory bodies (closes scopes). */
    IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4392
4393
/**
 * @opmaps grp1_83
 * @opcode /1
 * @note OR Ev,Ib - sign-extended byte immediate.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Unlocked bodies, then the LOCK-prefixed memory bodies (closes scopes). */
    IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4404
4405
/**
 * @opmaps grp1_83
 * @opcode /2
 * @note ADC Ev,Ib - sign-extended byte immediate, carry-in from EFLAGS.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Unlocked bodies, then the LOCK-prefixed memory bodies (closes scopes). */
    IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4416
4417
/**
 * @opmaps grp1_83
 * @opcode /3
 * @note SBB Ev,Ib - sign-extended byte immediate, borrow-in from EFLAGS.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Unlocked bodies, then the LOCK-prefixed memory bodies (closes scopes). */
    IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4428
4429
/**
 * @opmaps grp1_83
 * @opcode /4
 * @note AND Ev,Ib - sign-extended byte immediate.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Unlocked bodies, then the LOCK-prefixed memory bodies (closes scopes). */
    IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4440
4441
/**
 * @opmaps grp1_83
 * @opcode /5
 * @note SUB Ev,Ib - sign-extended byte immediate.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* Unlocked bodies, then the LOCK-prefixed memory bodies (closes scopes). */
    IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4452
4453
/**
 * @opmaps grp1_83
 * @opcode /6
 * @note XOR Ev,Ib - sign-extended byte immediate.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* Unlocked bodies, then the LOCK-prefixed memory bodies (closes scopes). */
    IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4464
4465
/**
 * @opmaps grp1_83
 * @opcode /7
 * @note CMP Ev,Ib - only reads the destination (IEM_ACCESS_DATA_R); LOCK
 *       prefix is invalid, hence the NO_LOCK tail.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* Unlocked bodies; the NO_LOCK tail rejects LOCK and closes the scopes. */
    IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK();
}
4476
4477
/**
 * @opcode 0x83
 *
 * Group 1 dispatcher for the sign-extended byte-immediate forms; selects the
 * worker on the ModRM.reg field.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        /* reg is a 3-bit field, so all eight values are covered above. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4500
4501
/**
 * @opcode 0x84
 *
 * TEST Eb,Gb: AND without writing the result - destination is mapped
 * read-only and the LOCK prefix is rejected.  AF is left undefined.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Shared r/m8,r8 body; the NO_LOCK tail rejects LOCK and closes scopes. */
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_test_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
4512
4513
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv: AND without writing the result - destination is mapped
 * read-only and the LOCK prefix is rejected.  AF is left undefined.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Shared r/m,r body; the NO_LOCK tail rejects LOCK and closes scopes. */
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
4524
4525
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb: swaps a byte register with a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        /* Plain swap through two temporaries. */
        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* The memory form uses the locked worker by default, falling back to
           the unlocked one only when IEM_F_X86_DISREGARD_LOCK is set. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4577
4578
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv: swaps a word/dword/qword register with a register or memory
 * operand.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                /* Plain swap through two temporaries. */
                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Locked worker by default; unlocked only when the lock
                   semantics are being disregarded (IEM_F_X86_DISREGARD_LOCK). */
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Writing a 32-bit GPR zeroes the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4715
4716
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb: stores a byte register to a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4756
4757
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv: stores a word/dword/qword register to a register or memory
 * operand.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4854
4855
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb: loads a byte register from a byte register or memory operand.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4894
4895
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev: loads a word/dword/qword register from a register or memory
 * operand.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4992
4993
4994/**
4995 * opcode 0x63
4996 * @todo Table fixme
4997 */
4998FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4999{
5000 if (!IEM_IS_64BIT_CODE(pVCpu))
5001 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5002 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5003 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5004 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5005}
5006
5007
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw: stores a segment register.  Register destinations honour the
 * operand size (zero-extending for 32/64-bit); memory stores are always
 * 16-bit.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Zero-extend the 16-bit selector to the operand size. */
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5086
5087
5088
5089
5090/**
5091 * @opcode 0x8d
5092 */
5093FNIEMOP_DEF(iemOp_lea_Gv_M)
5094{
5095 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5096 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5097 if (IEM_IS_MODRM_REG_MODE(bRm))
5098 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5099
5100 switch (pVCpu->iem.s.enmEffOpSize)
5101 {
5102 case IEMMODE_16BIT:
5103 IEM_MC_BEGIN(0, 2);
5104 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5105 IEM_MC_LOCAL(uint16_t, u16Cast);
5106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5108 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5109 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5110 IEM_MC_ADVANCE_RIP_AND_FINISH();
5111 IEM_MC_END();
5112 break;
5113
5114 case IEMMODE_32BIT:
5115 IEM_MC_BEGIN(0, 2);
5116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5117 IEM_MC_LOCAL(uint32_t, u32Cast);
5118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5120 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5121 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5122 IEM_MC_ADVANCE_RIP_AND_FINISH();
5123 IEM_MC_END();
5124 break;
5125
5126 case IEMMODE_64BIT:
5127 IEM_MC_BEGIN(0, 1);
5128 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5129 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5131 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5132 IEM_MC_ADVANCE_RIP_AND_FINISH();
5133 IEM_MC_END();
5134 break;
5135
5136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5137 }
5138}
5139
5140
5141/**
5142 * @opcode 0x8e
5143 */
5144FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5145{
5146 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5147
5148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5149
5150 /*
5151 * The practical operand size is 16-bit.
5152 */
5153#if 0 /* not necessary */
5154 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5155#endif
5156
5157 /*
5158 * Check that the destination register exists and can be used with this
5159 * instruction. The REX.R prefix is ignored.
5160 */
5161 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5162 /** @todo r=bird: What does 8086 do here wrt CS? */
5163 if ( iSegReg == X86_SREG_CS
5164 || iSegReg > X86_SREG_GS)
5165 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5166
5167 /*
5168 * If rm is denoting a register, no more instruction bytes.
5169 */
5170 if (IEM_IS_MODRM_REG_MODE(bRm))
5171 {
5172 IEM_MC_BEGIN(2, 0);
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5175 IEM_MC_ARG(uint16_t, u16Value, 1);
5176 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5177 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5178 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5179 else
5180 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5181 IEM_MC_END();
5182 }
5183 else
5184 {
5185 /*
5186 * We're loading the register from memory. The access is word sized
5187 * regardless of operand size prefixes.
5188 */
5189 IEM_MC_BEGIN(2, 1);
5190 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5191 IEM_MC_ARG(uint16_t, u16Value, 1);
5192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5195 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5196 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5197 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5198 else
5199 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5200 IEM_MC_END();
5201 }
5202}
5203
5204
/** Opcode 0x8f /0.
 *
 * 'pop Ev' - pop a word/dword/qword off the stack into a register or memory
 * location.  The heavy lifting for the memory forms is done by the
 * iemCImpl_pop_mem16/32/64 workers.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        /* Note! The third IEM_MC_CALC_RM_EFF_ADDR parameter ('cbImm << 8')
           conveys the operand size so the calculation can compensate for the
           pre-incremented rSP.  No 'break' statements: each case exits via the
           CIMPL call / IEM_MC_END - NOTE(review): confirm these return. */
        case IEMMODE_16BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_32BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_64BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint8_t, iEffSeg, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5342
5343
5344/**
5345 * @opcode 0x8f
5346 */
5347FNIEMOP_DEF(iemOp_Grp1A__xop)
5348{
5349 /*
5350 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5351 * three byte VEX prefix, except that the mmmmm field cannot have the values
5352 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5353 */
5354 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5355 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5356 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5357
5358 IEMOP_MNEMONIC(xop, "xop");
5359 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5360 {
5361 /** @todo Test when exctly the XOP conformance checks kick in during
5362 * instruction decoding and fetching (using \#PF). */
5363 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5364 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5365 if ( ( pVCpu->iem.s.fPrefixes
5366 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5367 == 0)
5368 {
5369 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5370 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
5371 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5372 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5373 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5374 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
5375 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
5376 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
5377 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
5378
5379 /** @todo XOP: Just use new tables and decoders. */
5380 switch (bRm & 0x1f)
5381 {
5382 case 8: /* xop opcode map 8. */
5383 IEMOP_BITCH_ABOUT_STUB();
5384 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5385
5386 case 9: /* xop opcode map 9. */
5387 IEMOP_BITCH_ABOUT_STUB();
5388 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5389
5390 case 10: /* xop opcode map 10. */
5391 IEMOP_BITCH_ABOUT_STUB();
5392 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5393
5394 default:
5395 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5396 IEMOP_RAISE_INVALID_OPCODE_RET();
5397 }
5398 }
5399 else
5400 Log(("XOP: Invalid prefix mix!\n"));
5401 }
5402 else
5403 Log(("XOP: XOP support disabled!\n"));
5404 IEMOP_RAISE_INVALID_OPCODE_RET();
5405}
5406
5407
5408/**
5409 * Common 'xchg reg,rAX' helper.
5410 */
5411FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
5412{
5413 iReg |= pVCpu->iem.s.uRexB;
5414 switch (pVCpu->iem.s.enmEffOpSize)
5415 {
5416 case IEMMODE_16BIT:
5417 IEM_MC_BEGIN(0, 2);
5418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5419 IEM_MC_LOCAL(uint16_t, u16Tmp1);
5420 IEM_MC_LOCAL(uint16_t, u16Tmp2);
5421 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
5422 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
5423 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
5424 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
5425 IEM_MC_ADVANCE_RIP_AND_FINISH();
5426 IEM_MC_END();
5427 break;
5428
5429 case IEMMODE_32BIT:
5430 IEM_MC_BEGIN(0, 2);
5431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5432 IEM_MC_LOCAL(uint32_t, u32Tmp1);
5433 IEM_MC_LOCAL(uint32_t, u32Tmp2);
5434 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
5435 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
5436 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
5437 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
5438 IEM_MC_ADVANCE_RIP_AND_FINISH();
5439 IEM_MC_END();
5440 break;
5441
5442 case IEMMODE_64BIT:
5443 IEM_MC_BEGIN(0, 2);
5444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5445 IEM_MC_LOCAL(uint64_t, u64Tmp1);
5446 IEM_MC_LOCAL(uint64_t, u64Tmp2);
5447 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
5448 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
5449 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
5450 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
5451 IEM_MC_ADVANCE_RIP_AND_FINISH();
5452 IEM_MC_END();
5453 break;
5454
5455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5456 }
5457}
5458
5459
5460/**
5461 * @opcode 0x90
5462 */
5463FNIEMOP_DEF(iemOp_nop)
5464{
5465 /* R8/R8D and RAX/EAX can be exchanged. */
5466 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
5467 {
5468 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
5469 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
5470 }
5471
5472 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
5473 {
5474 IEMOP_MNEMONIC(pause, "pause");
5475 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
5476 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
5477 if (!IEM_IS_IN_GUEST(pVCpu))
5478 { /* probable */ }
5479#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5480 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
5481 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
5482#endif
5483#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
5484 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
5485 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
5486#endif
5487 }
5488 else
5489 IEMOP_MNEMONIC(nop, "nop");
5490 /** @todo testcase: lock nop; lock pause */
5491 IEM_MC_BEGIN(0, 0);
5492 IEMOP_HLP_DONE_DECODING();
5493 IEM_MC_ADVANCE_RIP_AND_FINISH();
5494 IEM_MC_END();
5495}
5496
5497
5498/**
5499 * @opcode 0x91
5500 */
5501FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
5502{
5503 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
5504 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
5505}
5506
5507
5508/**
5509 * @opcode 0x92
5510 */
5511FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
5512{
5513 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
5514 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
5515}
5516
5517
5518/**
5519 * @opcode 0x93
5520 */
5521FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
5522{
5523 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
5524 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
5525}
5526
5527
5528/**
5529 * @opcode 0x94
5530 */
5531FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
5532{
5533 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
5534 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
5535}
5536
5537
5538/**
5539 * @opcode 0x95
5540 */
5541FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
5542{
5543 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
5544 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
5545}
5546
5547
5548/**
5549 * @opcode 0x96
5550 */
5551FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
5552{
5553 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
5554 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
5555}
5556
5557
5558/**
5559 * @opcode 0x97
5560 */
5561FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
5562{
5563 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
5564 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
5565}
5566
5567
5568/**
5569 * @opcode 0x98
5570 */
5571FNIEMOP_DEF(iemOp_cbw)
5572{
5573 switch (pVCpu->iem.s.enmEffOpSize)
5574 {
5575 case IEMMODE_16BIT:
5576 IEMOP_MNEMONIC(cbw, "cbw");
5577 IEM_MC_BEGIN(0, 1);
5578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5579 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
5580 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
5581 } IEM_MC_ELSE() {
5582 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
5583 } IEM_MC_ENDIF();
5584 IEM_MC_ADVANCE_RIP_AND_FINISH();
5585 IEM_MC_END();
5586 break;
5587
5588 case IEMMODE_32BIT:
5589 IEMOP_MNEMONIC(cwde, "cwde");
5590 IEM_MC_BEGIN(0, 1);
5591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5592 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5593 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
5594 } IEM_MC_ELSE() {
5595 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
5596 } IEM_MC_ENDIF();
5597 IEM_MC_ADVANCE_RIP_AND_FINISH();
5598 IEM_MC_END();
5599 break;
5600
5601 case IEMMODE_64BIT:
5602 IEMOP_MNEMONIC(cdqe, "cdqe");
5603 IEM_MC_BEGIN(0, 1);
5604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5605 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5606 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
5607 } IEM_MC_ELSE() {
5608 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
5609 } IEM_MC_ENDIF();
5610 IEM_MC_ADVANCE_RIP_AND_FINISH();
5611 IEM_MC_END();
5612 break;
5613
5614 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5615 }
5616}
5617
5618
5619/**
5620 * @opcode 0x99
5621 */
5622FNIEMOP_DEF(iemOp_cwd)
5623{
5624 switch (pVCpu->iem.s.enmEffOpSize)
5625 {
5626 case IEMMODE_16BIT:
5627 IEMOP_MNEMONIC(cwd, "cwd");
5628 IEM_MC_BEGIN(0, 1);
5629 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5630 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5631 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
5632 } IEM_MC_ELSE() {
5633 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
5634 } IEM_MC_ENDIF();
5635 IEM_MC_ADVANCE_RIP_AND_FINISH();
5636 IEM_MC_END();
5637 break;
5638
5639 case IEMMODE_32BIT:
5640 IEMOP_MNEMONIC(cdq, "cdq");
5641 IEM_MC_BEGIN(0, 1);
5642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5643 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5644 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
5645 } IEM_MC_ELSE() {
5646 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
5647 } IEM_MC_ENDIF();
5648 IEM_MC_ADVANCE_RIP_AND_FINISH();
5649 IEM_MC_END();
5650 break;
5651
5652 case IEMMODE_64BIT:
5653 IEMOP_MNEMONIC(cqo, "cqo");
5654 IEM_MC_BEGIN(0, 1);
5655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5656 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
5657 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
5658 } IEM_MC_ELSE() {
5659 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
5660 } IEM_MC_ENDIF();
5661 IEM_MC_ADVANCE_RIP_AND_FINISH();
5662 IEM_MC_END();
5663 break;
5664
5665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5666 }
5667}
5668
5669
5670/**
5671 * @opcode 0x9a
5672 */
5673FNIEMOP_DEF(iemOp_call_Ap)
5674{
5675 IEMOP_MNEMONIC(call_Ap, "call Ap");
5676 IEMOP_HLP_NO_64BIT();
5677
5678 /* Decode the far pointer address and pass it on to the far call C implementation. */
5679 uint32_t off32Seg;
5680 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
5681 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
5682 else
5683 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
5684 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
5685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5686 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
5687 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
5688 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
5689}
5690
5691
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 exceptions (and CR0.MP/TS conditions) and otherwise
 * behaves like a NOP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5703
5704
5705/**
5706 * @opcode 0x9c
5707 */
5708FNIEMOP_DEF(iemOp_pushf_Fv)
5709{
5710 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
5711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5712 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5713 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
5714}
5715
5716
5717/**
5718 * @opcode 0x9d
5719 */
5720FNIEMOP_DEF(iemOp_popf_Fv)
5721{
5722 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
5723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5724 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5725 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ,
5726 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
5727}
5728
5729
5730/**
5731 * @opcode 0x9e
5732 */
5733FNIEMOP_DEF(iemOp_sahf)
5734{
5735 IEMOP_MNEMONIC(sahf, "sahf");
5736 if ( IEM_IS_64BIT_CODE(pVCpu)
5737 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5738 IEMOP_RAISE_INVALID_OPCODE_RET();
5739 IEM_MC_BEGIN(0, 2);
5740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5741 IEM_MC_LOCAL(uint32_t, u32Flags);
5742 IEM_MC_LOCAL(uint32_t, EFlags);
5743 IEM_MC_FETCH_EFLAGS(EFlags);
5744 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
5745 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5746 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
5747 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
5748 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
5749 IEM_MC_COMMIT_EFLAGS(EFlags);
5750 IEM_MC_ADVANCE_RIP_AND_FINISH();
5751 IEM_MC_END();
5752}
5753
5754
5755/**
5756 * @opcode 0x9f
5757 */
5758FNIEMOP_DEF(iemOp_lahf)
5759{
5760 IEMOP_MNEMONIC(lahf, "lahf");
5761 if ( IEM_IS_64BIT_CODE(pVCpu)
5762 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5763 IEMOP_RAISE_INVALID_OPCODE_RET();
5764 IEM_MC_BEGIN(0, 1);
5765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5766 IEM_MC_LOCAL(uint8_t, u8Flags);
5767 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
5768 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
5769 IEM_MC_ADVANCE_RIP_AND_FINISH();
5770 IEM_MC_END();
5771}
5772
5773
5774/**
5775 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
5776 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
5777 * Will return/throw on failures.
5778 * @param a_GCPtrMemOff The variable to store the offset in.
5779 */
5780#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
5781 do \
5782 { \
5783 switch (pVCpu->iem.s.enmEffAddrMode) \
5784 { \
5785 case IEMMODE_16BIT: \
5786 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
5787 break; \
5788 case IEMMODE_32BIT: \
5789 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
5790 break; \
5791 case IEMMODE_64BIT: \
5792 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
5793 break; \
5794 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5795 } \
5796 } while (0)
5797
5798/**
5799 * @opcode 0xa0
5800 */
5801FNIEMOP_DEF(iemOp_mov_AL_Ob)
5802{
5803 /*
5804 * Get the offset.
5805 */
5806 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
5807 RTGCPTR GCPtrMemOff;
5808 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5809
5810 /*
5811 * Fetch AL.
5812 */
5813 IEM_MC_BEGIN(0,1);
5814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5815 IEM_MC_LOCAL(uint8_t, u8Tmp);
5816 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5817 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
5818 IEM_MC_ADVANCE_RIP_AND_FINISH();
5819 IEM_MC_END();
5820}
5821
5822
5823/**
5824 * @opcode 0xa1
5825 */
5826FNIEMOP_DEF(iemOp_mov_rAX_Ov)
5827{
5828 /*
5829 * Get the offset.
5830 */
5831 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
5832 RTGCPTR GCPtrMemOff;
5833 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5834
5835 /*
5836 * Fetch rAX.
5837 */
5838 switch (pVCpu->iem.s.enmEffOpSize)
5839 {
5840 case IEMMODE_16BIT:
5841 IEM_MC_BEGIN(0,1);
5842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5843 IEM_MC_LOCAL(uint16_t, u16Tmp);
5844 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5845 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
5846 IEM_MC_ADVANCE_RIP_AND_FINISH();
5847 IEM_MC_END();
5848 break;
5849
5850 case IEMMODE_32BIT:
5851 IEM_MC_BEGIN(0,1);
5852 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5853 IEM_MC_LOCAL(uint32_t, u32Tmp);
5854 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5855 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
5856 IEM_MC_ADVANCE_RIP_AND_FINISH();
5857 IEM_MC_END();
5858 break;
5859
5860 case IEMMODE_64BIT:
5861 IEM_MC_BEGIN(0,1);
5862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5863 IEM_MC_LOCAL(uint64_t, u64Tmp);
5864 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5865 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
5866 IEM_MC_ADVANCE_RIP_AND_FINISH();
5867 IEM_MC_END();
5868 break;
5869
5870 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5871 }
5872}
5873
5874
5875/**
5876 * @opcode 0xa2
5877 */
5878FNIEMOP_DEF(iemOp_mov_Ob_AL)
5879{
5880 /*
5881 * Get the offset.
5882 */
5883 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
5884 RTGCPTR GCPtrMemOff;
5885 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5886
5887 /*
5888 * Store AL.
5889 */
5890 IEM_MC_BEGIN(0,1);
5891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5892 IEM_MC_LOCAL(uint8_t, u8Tmp);
5893 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
5894 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
5895 IEM_MC_ADVANCE_RIP_AND_FINISH();
5896 IEM_MC_END();
5897}
5898
5899
5900/**
5901 * @opcode 0xa3
5902 */
5903FNIEMOP_DEF(iemOp_mov_Ov_rAX)
5904{
5905 /*
5906 * Get the offset.
5907 */
5908 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
5909 RTGCPTR GCPtrMemOff;
5910 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5911
5912 /*
5913 * Store rAX.
5914 */
5915 switch (pVCpu->iem.s.enmEffOpSize)
5916 {
5917 case IEMMODE_16BIT:
5918 IEM_MC_BEGIN(0,1);
5919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5920 IEM_MC_LOCAL(uint16_t, u16Tmp);
5921 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
5922 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
5923 IEM_MC_ADVANCE_RIP_AND_FINISH();
5924 IEM_MC_END();
5925 break;
5926
5927 case IEMMODE_32BIT:
5928 IEM_MC_BEGIN(0,1);
5929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5930 IEM_MC_LOCAL(uint32_t, u32Tmp);
5931 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
5932 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
5933 IEM_MC_ADVANCE_RIP_AND_FINISH();
5934 IEM_MC_END();
5935 break;
5936
5937 case IEMMODE_64BIT:
5938 IEM_MC_BEGIN(0,1);
5939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5940 IEM_MC_LOCAL(uint64_t, u64Tmp);
5941 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
5942 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
5943 IEM_MC_ADVANCE_RIP_AND_FINISH();
5944 IEM_MC_END();
5945 break;
5946
5947 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5948 }
5949}
5950
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Copies one ValBits-sized element from DS(or override):rSI to ES:rDI and
 * steps both index registers by the element size - backwards when EFLAGS.DF
 * is set, forwards otherwise.  AddrBits selects the width of rSI/rDI used. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
5970
5971/**
5972 * @opcode 0xa4
5973 */
5974FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
5975{
5976 /*
5977 * Use the C implementation if a repeat prefix is encountered.
5978 */
5979 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5980 {
5981 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
5982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5983 switch (pVCpu->iem.s.enmEffAddrMode)
5984 {
5985 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
5986 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
5987 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
5988 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5989 }
5990 }
5991
5992 /*
5993 * Sharing case implementation with movs[wdq] below.
5994 */
5995 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
5996 switch (pVCpu->iem.s.enmEffAddrMode)
5997 {
5998 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
5999 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
6000 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
6001 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6002 }
6003}
6004
6005
6006/**
6007 * @opcode 0xa5
6008 */
6009FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6010{
6011
6012 /*
6013 * Use the C implementation if a repeat prefix is encountered.
6014 */
6015 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6016 {
6017 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6019 switch (pVCpu->iem.s.enmEffOpSize)
6020 {
6021 case IEMMODE_16BIT:
6022 switch (pVCpu->iem.s.enmEffAddrMode)
6023 {
6024 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6025 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6026 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6028 }
6029 break;
6030 case IEMMODE_32BIT:
6031 switch (pVCpu->iem.s.enmEffAddrMode)
6032 {
6033 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6034 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6035 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6036 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6037 }
6038 case IEMMODE_64BIT:
6039 switch (pVCpu->iem.s.enmEffAddrMode)
6040 {
6041 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6042 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6043 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6045 }
6046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6047 }
6048 }
6049
6050 /*
6051 * Annoying double switch here.
6052 * Using ugly macro for implementing the cases, sharing it with movsb.
6053 */
6054 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6055 switch (pVCpu->iem.s.enmEffOpSize)
6056 {
6057 case IEMMODE_16BIT:
6058 switch (pVCpu->iem.s.enmEffAddrMode)
6059 {
6060 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
6061 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
6062 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
6063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6064 }
6065 break;
6066
6067 case IEMMODE_32BIT:
6068 switch (pVCpu->iem.s.enmEffAddrMode)
6069 {
6070 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
6071 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
6072 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
6073 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6074 }
6075 break;
6076
6077 case IEMMODE_64BIT:
6078 switch (pVCpu->iem.s.enmEffAddrMode)
6079 {
6080 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6081 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
6082 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
6083 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6084 }
6085 break;
6086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6087 }
6088}
6089
6090#undef IEM_MOVS_CASE
6091
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Compares the ValBits-sized element at DS(or override):rSI with the one at
 * ES:rDI via iemAImpl_cmp_uNN (updating EFLAGS only), then steps both index
 * registers by the element size - backwards when EFLAGS.DF is set. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6119
6120/**
6121 * @opcode 0xa6
6122 */
6123FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6124{
6125
6126 /*
6127 * Use the C implementation if a repeat prefix is encountered.
6128 */
6129 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6130 {
6131 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6133 switch (pVCpu->iem.s.enmEffAddrMode)
6134 {
6135 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6136 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6137 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6138 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6139 }
6140 }
6141 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6142 {
6143 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6145 switch (pVCpu->iem.s.enmEffAddrMode)
6146 {
6147 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6148 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6149 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6150 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6151 }
6152 }
6153
6154 /*
6155 * Sharing case implementation with cmps[wdq] below.
6156 */
6157 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6158 switch (pVCpu->iem.s.enmEffAddrMode)
6159 {
6160 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
6161 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
6162 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
6163 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6164 }
6165}
6166
6167
6168/**
6169 * @opcode 0xa7
6170 */
6171FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6172{
6173 /*
6174 * Use the C implementation if a repeat prefix is encountered.
6175 */
6176 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6177 {
6178 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6180 switch (pVCpu->iem.s.enmEffOpSize)
6181 {
6182 case IEMMODE_16BIT:
6183 switch (pVCpu->iem.s.enmEffAddrMode)
6184 {
6185 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6186 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6187 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6189 }
6190 break;
6191 case IEMMODE_32BIT:
6192 switch (pVCpu->iem.s.enmEffAddrMode)
6193 {
6194 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6195 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6196 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6198 }
6199 case IEMMODE_64BIT:
6200 switch (pVCpu->iem.s.enmEffAddrMode)
6201 {
6202 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6203 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6204 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6206 }
6207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6208 }
6209 }
6210
6211 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6212 {
6213 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6214 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6215 switch (pVCpu->iem.s.enmEffOpSize)
6216 {
6217 case IEMMODE_16BIT:
6218 switch (pVCpu->iem.s.enmEffAddrMode)
6219 {
6220 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6221 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6222 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6223 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6224 }
6225 break;
6226 case IEMMODE_32BIT:
6227 switch (pVCpu->iem.s.enmEffAddrMode)
6228 {
6229 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6230 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6231 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6233 }
6234 case IEMMODE_64BIT:
6235 switch (pVCpu->iem.s.enmEffAddrMode)
6236 {
6237 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6238 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6239 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6241 }
6242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6243 }
6244 }
6245
6246 /*
6247 * Annoying double switch here.
6248 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6249 */
6250 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6251 switch (pVCpu->iem.s.enmEffOpSize)
6252 {
6253 case IEMMODE_16BIT:
6254 switch (pVCpu->iem.s.enmEffAddrMode)
6255 {
6256 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
6257 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
6258 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
6259 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6260 }
6261 break;
6262
6263 case IEMMODE_32BIT:
6264 switch (pVCpu->iem.s.enmEffAddrMode)
6265 {
6266 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
6267 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
6268 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
6269 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6270 }
6271 break;
6272
6273 case IEMMODE_64BIT:
6274 switch (pVCpu->iem.s.enmEffAddrMode)
6275 {
6276 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6277 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
6278 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
6279 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6280 }
6281 break;
6282 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6283 }
6284}
6285
6286#undef IEM_CMPS_CASE
6287
6288/**
6289 * @opcode 0xa8
6290 */
6291FNIEMOP_DEF(iemOp_test_AL_Ib)
6292{
6293 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6294 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6295 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6296}
6297
6298
6299/**
6300 * @opcode 0xa9
6301 */
6302FNIEMOP_DEF(iemOp_test_eAX_Iz)
6303{
6304 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6305 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6306 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6307}
6308
6309
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one non-REP STOS iteration: stores the
 * ValBits-wide rAX value to [ES:rDI] (AddrBits-wide addressing) and then
 * steps rDI by the operand size, backwards if EFLAGS.DF is set.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6326
6327/**
6328 * @opcode 0xaa
6329 */
6330FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6331{
6332 /*
6333 * Use the C implementation if a repeat prefix is encountered.
6334 */
6335 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6336 {
6337 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6339 switch (pVCpu->iem.s.enmEffAddrMode)
6340 {
6341 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6342 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6343 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6345 }
6346 }
6347
6348 /*
6349 * Sharing case implementation with stos[wdq] below.
6350 */
6351 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6352 switch (pVCpu->iem.s.enmEffAddrMode)
6353 {
6354 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
6355 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
6356 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
6357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6358 }
6359}
6360
6361
6362/**
6363 * @opcode 0xab
6364 */
6365FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6366{
6367 /*
6368 * Use the C implementation if a repeat prefix is encountered.
6369 */
6370 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6371 {
6372 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6374 switch (pVCpu->iem.s.enmEffOpSize)
6375 {
6376 case IEMMODE_16BIT:
6377 switch (pVCpu->iem.s.enmEffAddrMode)
6378 {
6379 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
6380 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
6381 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
6382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6383 }
6384 break;
6385 case IEMMODE_32BIT:
6386 switch (pVCpu->iem.s.enmEffAddrMode)
6387 {
6388 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
6389 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
6390 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
6391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6392 }
6393 case IEMMODE_64BIT:
6394 switch (pVCpu->iem.s.enmEffAddrMode)
6395 {
6396 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6397 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
6398 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
6399 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6400 }
6401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6402 }
6403 }
6404
6405 /*
6406 * Annoying double switch here.
6407 * Using ugly macro for implementing the cases, sharing it with stosb.
6408 */
6409 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
6410 switch (pVCpu->iem.s.enmEffOpSize)
6411 {
6412 case IEMMODE_16BIT:
6413 switch (pVCpu->iem.s.enmEffAddrMode)
6414 {
6415 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
6416 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
6417 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
6418 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6419 }
6420 break;
6421
6422 case IEMMODE_32BIT:
6423 switch (pVCpu->iem.s.enmEffAddrMode)
6424 {
6425 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
6426 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
6427 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
6428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6429 }
6430 break;
6431
6432 case IEMMODE_64BIT:
6433 switch (pVCpu->iem.s.enmEffAddrMode)
6434 {
6435 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6436 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
6437 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
6438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6439 }
6440 break;
6441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6442 }
6443}
6444
6445#undef IEM_STOS_CASE
6446
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one non-REP LODS iteration: loads a ValBits-wide
 * value from [iEffSeg:rSI] into rAX and steps rSI by the operand size,
 * backwards if EFLAGS.DF is set.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6463
6464/**
6465 * @opcode 0xac
6466 */
6467FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
6468{
6469 /*
6470 * Use the C implementation if a repeat prefix is encountered.
6471 */
6472 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6473 {
6474 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
6475 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6476 switch (pVCpu->iem.s.enmEffAddrMode)
6477 {
6478 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
6479 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
6480 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
6481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6482 }
6483 }
6484
6485 /*
6486 * Sharing case implementation with stos[wdq] below.
6487 */
6488 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
6489 switch (pVCpu->iem.s.enmEffAddrMode)
6490 {
6491 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
6492 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
6493 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
6494 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6495 }
6496}
6497
6498
6499/**
6500 * @opcode 0xad
6501 */
6502FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
6503{
6504 /*
6505 * Use the C implementation if a repeat prefix is encountered.
6506 */
6507 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6508 {
6509 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
6510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6511 switch (pVCpu->iem.s.enmEffOpSize)
6512 {
6513 case IEMMODE_16BIT:
6514 switch (pVCpu->iem.s.enmEffAddrMode)
6515 {
6516 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
6517 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
6518 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
6519 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6520 }
6521 break;
6522 case IEMMODE_32BIT:
6523 switch (pVCpu->iem.s.enmEffAddrMode)
6524 {
6525 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
6526 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
6527 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
6528 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6529 }
6530 case IEMMODE_64BIT:
6531 switch (pVCpu->iem.s.enmEffAddrMode)
6532 {
6533 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
6534 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
6535 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
6536 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6537 }
6538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6539 }
6540 }
6541
6542 /*
6543 * Annoying double switch here.
6544 * Using ugly macro for implementing the cases, sharing it with lodsb.
6545 */
6546 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
6547 switch (pVCpu->iem.s.enmEffOpSize)
6548 {
6549 case IEMMODE_16BIT:
6550 switch (pVCpu->iem.s.enmEffAddrMode)
6551 {
6552 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
6553 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
6554 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
6555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6556 }
6557 break;
6558
6559 case IEMMODE_32BIT:
6560 switch (pVCpu->iem.s.enmEffAddrMode)
6561 {
6562 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
6563 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
6564 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
6565 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6566 }
6567 break;
6568
6569 case IEMMODE_64BIT:
6570 switch (pVCpu->iem.s.enmEffAddrMode)
6571 {
6572 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6573 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
6574 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
6575 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6576 }
6577 break;
6578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6579 }
6580}
6581
6582#undef IEM_LODS_CASE
6583
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one non-REP SCAS iteration: compares rAX
 * (ValBits wide) against [ES:rDI] via the cmp worker (EFLAGS only, no
 * store) and steps rDI by the operand size, backwards if EFLAGS.DF is set.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
6606
6607/**
6608 * @opcode 0xae
6609 */
6610FNIEMOP_DEF(iemOp_scasb_AL_Xb)
6611{
6612 /*
6613 * Use the C implementation if a repeat prefix is encountered.
6614 */
6615 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6616 {
6617 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
6618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6619 switch (pVCpu->iem.s.enmEffAddrMode)
6620 {
6621 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
6622 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
6623 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
6624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6625 }
6626 }
6627 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6628 {
6629 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
6630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6631 switch (pVCpu->iem.s.enmEffAddrMode)
6632 {
6633 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
6634 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
6635 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
6636 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6637 }
6638 }
6639
6640 /*
6641 * Sharing case implementation with stos[wdq] below.
6642 */
6643 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
6644 switch (pVCpu->iem.s.enmEffAddrMode)
6645 {
6646 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
6647 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
6648 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
6649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6650 }
6651}
6652
6653
6654/**
6655 * @opcode 0xaf
6656 */
6657FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
6658{
6659 /*
6660 * Use the C implementation if a repeat prefix is encountered.
6661 */
6662 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6663 {
6664 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
6665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6666 switch (pVCpu->iem.s.enmEffOpSize)
6667 {
6668 case IEMMODE_16BIT:
6669 switch (pVCpu->iem.s.enmEffAddrMode)
6670 {
6671 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
6672 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
6673 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
6674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6675 }
6676 break;
6677 case IEMMODE_32BIT:
6678 switch (pVCpu->iem.s.enmEffAddrMode)
6679 {
6680 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
6681 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
6682 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
6683 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6684 }
6685 case IEMMODE_64BIT:
6686 switch (pVCpu->iem.s.enmEffAddrMode)
6687 {
6688 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
6689 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
6690 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
6691 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6692 }
6693 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6694 }
6695 }
6696 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6697 {
6698 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
6699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6700 switch (pVCpu->iem.s.enmEffOpSize)
6701 {
6702 case IEMMODE_16BIT:
6703 switch (pVCpu->iem.s.enmEffAddrMode)
6704 {
6705 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
6706 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
6707 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
6708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6709 }
6710 break;
6711 case IEMMODE_32BIT:
6712 switch (pVCpu->iem.s.enmEffAddrMode)
6713 {
6714 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
6715 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
6716 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
6717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6718 }
6719 case IEMMODE_64BIT:
6720 switch (pVCpu->iem.s.enmEffAddrMode)
6721 {
6722 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
6723 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
6724 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
6725 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6726 }
6727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6728 }
6729 }
6730
6731 /*
6732 * Annoying double switch here.
6733 * Using ugly macro for implementing the cases, sharing it with scasb.
6734 */
6735 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
6736 switch (pVCpu->iem.s.enmEffOpSize)
6737 {
6738 case IEMMODE_16BIT:
6739 switch (pVCpu->iem.s.enmEffAddrMode)
6740 {
6741 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
6742 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
6743 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
6744 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6745 }
6746 break;
6747
6748 case IEMMODE_32BIT:
6749 switch (pVCpu->iem.s.enmEffAddrMode)
6750 {
6751 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
6752 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
6753 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
6754 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6755 }
6756 break;
6757
6758 case IEMMODE_64BIT:
6759 switch (pVCpu->iem.s.enmEffAddrMode)
6760 {
6761 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6762 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
6763 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
6764 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6765 }
6766 break;
6767 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6768 }
6769}
6770
6771#undef IEM_SCAS_CASE
6772
6773/**
6774 * Common 'mov r8, imm8' helper.
6775 */
6776FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
6777{
6778 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6779 IEM_MC_BEGIN(0, 1);
6780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6781 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
6782 IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
6783 IEM_MC_ADVANCE_RIP_AND_FINISH();
6784 IEM_MC_END();
6785}
6786
6787
6788/**
6789 * @opcode 0xb0
6790 */
6791FNIEMOP_DEF(iemOp_mov_AL_Ib)
6792{
6793 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
6794 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6795}
6796
6797
6798/**
6799 * @opcode 0xb1
6800 */
6801FNIEMOP_DEF(iemOp_CL_Ib)
6802{
6803 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
6804 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6805}
6806
6807
6808/**
6809 * @opcode 0xb2
6810 */
6811FNIEMOP_DEF(iemOp_DL_Ib)
6812{
6813 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
6814 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6815}
6816
6817
6818/**
6819 * @opcode 0xb3
6820 */
6821FNIEMOP_DEF(iemOp_BL_Ib)
6822{
6823 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
6824 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6825}
6826
6827
6828/**
6829 * @opcode 0xb4
6830 */
6831FNIEMOP_DEF(iemOp_mov_AH_Ib)
6832{
6833 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
6834 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6835}
6836
6837
6838/**
6839 * @opcode 0xb5
6840 */
6841FNIEMOP_DEF(iemOp_CH_Ib)
6842{
6843 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
6844 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6845}
6846
6847
6848/**
6849 * @opcode 0xb6
6850 */
6851FNIEMOP_DEF(iemOp_DH_Ib)
6852{
6853 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
6854 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6855}
6856
6857
6858/**
6859 * @opcode 0xb7
6860 */
6861FNIEMOP_DEF(iemOp_BH_Ib)
6862{
6863 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
6864 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6865}
6866
6867
6868/**
6869 * Common 'mov regX,immX' helper.
6870 */
6871FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
6872{
6873 switch (pVCpu->iem.s.enmEffOpSize)
6874 {
6875 case IEMMODE_16BIT:
6876 {
6877 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6878 IEM_MC_BEGIN(0, 1);
6879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6880 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
6881 IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
6882 IEM_MC_ADVANCE_RIP_AND_FINISH();
6883 IEM_MC_END();
6884 break;
6885 }
6886
6887 case IEMMODE_32BIT:
6888 {
6889 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6890 IEM_MC_BEGIN(0, 1);
6891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6892 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
6893 IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
6894 IEM_MC_ADVANCE_RIP_AND_FINISH();
6895 IEM_MC_END();
6896 break;
6897 }
6898 case IEMMODE_64BIT:
6899 {
6900 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
6901 IEM_MC_BEGIN(0, 1);
6902 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6903 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
6904 IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
6905 IEM_MC_ADVANCE_RIP_AND_FINISH();
6906 IEM_MC_END();
6907 break;
6908 }
6909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6910 }
6911}
6912
6913
6914/**
6915 * @opcode 0xb8
6916 */
6917FNIEMOP_DEF(iemOp_eAX_Iv)
6918{
6919 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
6920 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6921}
6922
6923
6924/**
6925 * @opcode 0xb9
6926 */
6927FNIEMOP_DEF(iemOp_eCX_Iv)
6928{
6929 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
6930 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6931}
6932
6933
6934/**
6935 * @opcode 0xba
6936 */
6937FNIEMOP_DEF(iemOp_eDX_Iv)
6938{
6939 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
6940 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6941}
6942
6943
6944/**
6945 * @opcode 0xbb
6946 */
6947FNIEMOP_DEF(iemOp_eBX_Iv)
6948{
6949 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
6950 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6951}
6952
6953
6954/**
6955 * @opcode 0xbc
6956 */
6957FNIEMOP_DEF(iemOp_eSP_Iv)
6958{
6959 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
6960 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6961}
6962
6963
6964/**
6965 * @opcode 0xbd
6966 */
6967FNIEMOP_DEF(iemOp_eBP_Iv)
6968{
6969 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
6970 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6971}
6972
6973
6974/**
6975 * @opcode 0xbe
6976 */
6977FNIEMOP_DEF(iemOp_eSI_Iv)
6978{
6979 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
6980 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6981}
6982
6983
6984/**
6985 * @opcode 0xbf
6986 */
6987FNIEMOP_DEF(iemOp_eDI_Iv)
6988{
6989 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
6990 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6991}
6992
6993
6994/**
6995 * @opcode 0xc0
6996 */
6997FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
6998{
6999 IEMOP_HLP_MIN_186();
7000 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7001 PCIEMOPSHIFTSIZES pImpl;
7002 switch (IEM_GET_MODRM_REG_8(bRm))
7003 {
7004 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
7005 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
7006 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
7007 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
7008 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
7009 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
7010 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
7011 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7012 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7013 }
7014 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7015
7016 if (IEM_IS_MODRM_REG_MODE(bRm))
7017 {
7018 /* register */
7019 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7020 IEM_MC_BEGIN(3, 0);
7021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7022 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7023 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7024 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7025 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7026 IEM_MC_REF_EFLAGS(pEFlags);
7027 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7028 IEM_MC_ADVANCE_RIP_AND_FINISH();
7029 IEM_MC_END();
7030 }
7031 else
7032 {
7033 /* memory */
7034 IEM_MC_BEGIN(3, 2);
7035 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7036 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7037 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7038 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7039
7040 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7041 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7042 IEM_MC_ASSIGN(cShiftArg, cShift);
7043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7044 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7045 IEM_MC_FETCH_EFLAGS(EFlags);
7046 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7047
7048 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7049 IEM_MC_COMMIT_EFLAGS(EFlags);
7050 IEM_MC_ADVANCE_RIP_AND_FINISH();
7051 IEM_MC_END();
7052 }
7053}
7054
7055
7056/**
7057 * @opcode 0xc1
7058 */
7059FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
7060{
7061 IEMOP_HLP_MIN_186();
7062 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7063 PCIEMOPSHIFTSIZES pImpl;
7064 switch (IEM_GET_MODRM_REG_8(bRm))
7065 {
7066 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
7067 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
7068 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
7069 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
7070 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
7071 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
7072 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
7073 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7074 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7075 }
7076 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7077
7078 if (IEM_IS_MODRM_REG_MODE(bRm))
7079 {
7080 /* register */
7081 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7082 switch (pVCpu->iem.s.enmEffOpSize)
7083 {
7084 case IEMMODE_16BIT:
7085 IEM_MC_BEGIN(3, 0);
7086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7087 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7088 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7089 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7090 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7091 IEM_MC_REF_EFLAGS(pEFlags);
7092 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7093 IEM_MC_ADVANCE_RIP_AND_FINISH();
7094 IEM_MC_END();
7095 break;
7096
7097 case IEMMODE_32BIT:
7098 IEM_MC_BEGIN(3, 0);
7099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7100 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7101 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7102 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7103 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7104 IEM_MC_REF_EFLAGS(pEFlags);
7105 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7106 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7107 IEM_MC_ADVANCE_RIP_AND_FINISH();
7108 IEM_MC_END();
7109 break;
7110
7111 case IEMMODE_64BIT:
7112 IEM_MC_BEGIN(3, 0);
7113 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7114 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7115 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7116 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7117 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7118 IEM_MC_REF_EFLAGS(pEFlags);
7119 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7120 IEM_MC_ADVANCE_RIP_AND_FINISH();
7121 IEM_MC_END();
7122 break;
7123
7124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7125 }
7126 }
7127 else
7128 {
7129 /* memory */
7130 switch (pVCpu->iem.s.enmEffOpSize)
7131 {
7132 case IEMMODE_16BIT:
7133 IEM_MC_BEGIN(3, 2);
7134 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7135 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7136 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7138
7139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7140 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7141 IEM_MC_ASSIGN(cShiftArg, cShift);
7142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7143 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7144 IEM_MC_FETCH_EFLAGS(EFlags);
7145 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7146
7147 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7148 IEM_MC_COMMIT_EFLAGS(EFlags);
7149 IEM_MC_ADVANCE_RIP_AND_FINISH();
7150 IEM_MC_END();
7151 break;
7152
7153 case IEMMODE_32BIT:
7154 IEM_MC_BEGIN(3, 2);
7155 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7156 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7157 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7159
7160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7161 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7162 IEM_MC_ASSIGN(cShiftArg, cShift);
7163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7164 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7165 IEM_MC_FETCH_EFLAGS(EFlags);
7166 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7167
7168 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7169 IEM_MC_COMMIT_EFLAGS(EFlags);
7170 IEM_MC_ADVANCE_RIP_AND_FINISH();
7171 IEM_MC_END();
7172 break;
7173
7174 case IEMMODE_64BIT:
7175 IEM_MC_BEGIN(3, 2);
7176 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7177 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7178 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7180
7181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7182 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7183 IEM_MC_ASSIGN(cShiftArg, cShift);
7184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7185 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7186 IEM_MC_FETCH_EFLAGS(EFlags);
7187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7188
7189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7190 IEM_MC_COMMIT_EFLAGS(EFlags);
7191 IEM_MC_ADVANCE_RIP_AND_FINISH();
7192 IEM_MC_END();
7193 break;
7194
7195 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7196 }
7197 }
7198}
7199
7200
7201/**
7202 * @opcode 0xc2
7203 */
7204FNIEMOP_DEF(iemOp_retn_Iw)
7205{
7206 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
7207 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7208 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7210 switch (pVCpu->iem.s.enmEffOpSize)
7211 {
7212 case IEMMODE_16BIT:
7213 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_16, u16Imm);
7214 case IEMMODE_32BIT:
7215 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_32, u16Imm);
7216 case IEMMODE_64BIT:
7217 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_iw_64, u16Imm);
7218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7219 }
7220}
7221
7222
7223/**
7224 * @opcode 0xc3
7225 */
7226FNIEMOP_DEF(iemOp_retn)
7227{
7228 IEMOP_MNEMONIC(retn, "retn");
7229 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7231 switch (pVCpu->iem.s.enmEffOpSize)
7232 {
7233 case IEMMODE_16BIT:
7234 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_16);
7235 case IEMMODE_32BIT:
7236 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_32);
7237 case IEMMODE_64BIT:
7238 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_retn_64);
7239 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7240 }
7241}
7242
7243
7244/**
7245 * @opcode 0xc4
7246 */
7247FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
7248{
7249 /* The LDS instruction is invalid 64-bit mode. In legacy and
7250 compatability mode it is invalid with MOD=3.
7251 The use as a VEX prefix is made possible by assigning the inverted
7252 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
7253 outside of 64-bit mode. VEX is not available in real or v86 mode. */
7254 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7255 if ( IEM_IS_64BIT_CODE(pVCpu)
7256 || IEM_IS_MODRM_REG_MODE(bRm) )
7257 {
7258 IEMOP_MNEMONIC(vex3_prefix, "vex3");
7259 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7260 {
7261 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7262 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7263 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
7264 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7265 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7266 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
7267 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
7268 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7269 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
7270 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
7271 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
7272 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
7273 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
7274
7275 switch (bRm & 0x1f)
7276 {
7277 case 1: /* 0x0f lead opcode byte. */
7278#ifdef IEM_WITH_VEX
7279 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7280#else
7281 IEMOP_BITCH_ABOUT_STUB();
7282 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7283#endif
7284
7285 case 2: /* 0x0f 0x38 lead opcode bytes. */
7286#ifdef IEM_WITH_VEX
7287 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7288#else
7289 IEMOP_BITCH_ABOUT_STUB();
7290 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7291#endif
7292
7293 case 3: /* 0x0f 0x3a lead opcode bytes. */
7294#ifdef IEM_WITH_VEX
7295 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7296#else
7297 IEMOP_BITCH_ABOUT_STUB();
7298 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7299#endif
7300
7301 default:
7302 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
7303 IEMOP_RAISE_INVALID_OPCODE_RET();
7304 }
7305 }
7306 Log(("VEX3: VEX support disabled!\n"));
7307 IEMOP_RAISE_INVALID_OPCODE_RET();
7308 }
7309
7310 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
7311 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
7312}
7313
7314
7315/**
7316 * @opcode 0xc5
7317 */
7318FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
7319{
7320 /* The LES instruction is invalid 64-bit mode. In legacy and
7321 compatability mode it is invalid with MOD=3.
7322 The use as a VEX prefix is made possible by assigning the inverted
7323 REX.R to the top MOD bit, and the top bit in the inverted register
7324 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
7325 to accessing registers 0..7 in this VEX form. */
7326 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7327 if ( IEM_IS_64BIT_CODE(pVCpu)
7328 || IEM_IS_MODRM_REG_MODE(bRm))
7329 {
7330 IEMOP_MNEMONIC(vex2_prefix, "vex2");
7331 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
7332 {
7333 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7334 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7335 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7336 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7337 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7338 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
7339 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
7340 pVCpu->iem.s.idxPrefix = bRm & 0x3;
7341
7342#ifdef IEM_WITH_VEX
7343 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7344#else
7345 IEMOP_BITCH_ABOUT_STUB();
7346 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7347#endif
7348 }
7349
7350 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
7351 Log(("VEX2: VEX support disabled!\n"));
7352 IEMOP_RAISE_INVALID_OPCODE_RET();
7353 }
7354
7355 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
7356 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
7357}
7358
7359
7360/**
7361 * @opcode 0xc6
7362 */
7363FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
7364{
7365 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7366 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7367 IEMOP_RAISE_INVALID_OPCODE_RET();
7368 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
7369
7370 if (IEM_IS_MODRM_REG_MODE(bRm))
7371 {
7372 /* register access */
7373 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7374 IEM_MC_BEGIN(0, 0);
7375 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7376 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
7377 IEM_MC_ADVANCE_RIP_AND_FINISH();
7378 IEM_MC_END();
7379 }
7380 else
7381 {
7382 /* memory access. */
7383 IEM_MC_BEGIN(0, 1);
7384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7385 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7386 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7388 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
7389 IEM_MC_ADVANCE_RIP_AND_FINISH();
7390 IEM_MC_END();
7391 }
7392}
7393
7394
7395/**
7396 * @opcode 0xc7
7397 */
7398FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
7399{
7400 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7401 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7402 IEMOP_RAISE_INVALID_OPCODE_RET();
7403 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
7404
7405 if (IEM_IS_MODRM_REG_MODE(bRm))
7406 {
7407 /* register access */
7408 switch (pVCpu->iem.s.enmEffOpSize)
7409 {
7410 case IEMMODE_16BIT:
7411 IEM_MC_BEGIN(0, 0);
7412 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7414 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
7415 IEM_MC_ADVANCE_RIP_AND_FINISH();
7416 IEM_MC_END();
7417 break;
7418
7419 case IEMMODE_32BIT:
7420 IEM_MC_BEGIN(0, 0);
7421 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7423 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
7424 IEM_MC_ADVANCE_RIP_AND_FINISH();
7425 IEM_MC_END();
7426 break;
7427
7428 case IEMMODE_64BIT:
7429 IEM_MC_BEGIN(0, 0);
7430 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
7431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7432 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
7433 IEM_MC_ADVANCE_RIP_AND_FINISH();
7434 IEM_MC_END();
7435 break;
7436
7437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7438 }
7439 }
7440 else
7441 {
7442 /* memory access. */
7443 switch (pVCpu->iem.s.enmEffOpSize)
7444 {
7445 case IEMMODE_16BIT:
7446 IEM_MC_BEGIN(0, 1);
7447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
7449 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7451 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
7452 IEM_MC_ADVANCE_RIP_AND_FINISH();
7453 IEM_MC_END();
7454 break;
7455
7456 case IEMMODE_32BIT:
7457 IEM_MC_BEGIN(0, 1);
7458 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
7460 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7462 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
7463 IEM_MC_ADVANCE_RIP_AND_FINISH();
7464 IEM_MC_END();
7465 break;
7466
7467 case IEMMODE_64BIT:
7468 IEM_MC_BEGIN(0, 1);
7469 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
7471 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
7472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7473 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
7474 IEM_MC_ADVANCE_RIP_AND_FINISH();
7475 IEM_MC_END();
7476 break;
7477
7478 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7479 }
7480 }
7481}
7482
7483
7484
7485
7486/**
7487 * @opcode 0xc8
7488 */
7489FNIEMOP_DEF(iemOp_enter_Iw_Ib)
7490{
7491 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
7492 IEMOP_HLP_MIN_186();
7493 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7494 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
7495 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
7496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7497 IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
7498}
7499
7500
7501/**
7502 * @opcode 0xc9
7503 */
7504FNIEMOP_DEF(iemOp_leave)
7505{
7506 IEMOP_MNEMONIC(leave, "leave");
7507 IEMOP_HLP_MIN_186();
7508 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7510 IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
7511}
7512
7513
7514/**
7515 * @opcode 0xca
7516 */
7517FNIEMOP_DEF(iemOp_retf_Iw)
7518{
7519 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
7520 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7521 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7522 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
7523 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
7524}
7525
7526
7527/**
7528 * @opcode 0xcb
7529 */
7530FNIEMOP_DEF(iemOp_retf)
7531{
7532 IEMOP_MNEMONIC(retf, "retf");
7533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7534 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE,
7535 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
7536}
7537
7538
7539/**
7540 * @opcode 0xcc
7541 */
7542FNIEMOP_DEF(iemOp_int3)
7543{
7544 IEMOP_MNEMONIC(int3, "int3");
7545 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7546 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
7547 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
7548 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
7549}
7550
7551
7552/**
7553 * @opcode 0xcd
7554 */
7555FNIEMOP_DEF(iemOp_int_Ib)
7556{
7557 IEMOP_MNEMONIC(int_Ib, "int Ib");
7558 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
7559 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7560 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
7561 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
7562 iemCImpl_int, u8Int, IEMINT_INTN);
7563}
7564
7565
7566/**
7567 * @opcode 0xce
7568 */
7569FNIEMOP_DEF(iemOp_into)
7570{
7571 IEMOP_MNEMONIC(into, "into");
7572 IEMOP_HLP_NO_64BIT();
7573 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_CONDITIONAL
7574 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
7575 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
7576}
7577
7578
7579/**
7580 * @opcode 0xcf
7581 */
7582FNIEMOP_DEF(iemOp_iret)
7583{
7584 IEMOP_MNEMONIC(iret, "iret");
7585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7586 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
7587 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ | IEM_CIMPL_F_VMEXIT,
7588 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
7589}
7590
7591
7592/**
7593 * @opcode 0xd0
7594 */
7595FNIEMOP_DEF(iemOp_Grp2_Eb_1)
7596{
7597 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7598 PCIEMOPSHIFTSIZES pImpl;
7599 switch (IEM_GET_MODRM_REG_8(bRm))
7600 {
7601 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
7602 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
7603 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
7604 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
7605 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
7606 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
7607 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
7608 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7609 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
7610 }
7611 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7612
7613 if (IEM_IS_MODRM_REG_MODE(bRm))
7614 {
7615 /* register */
7616 IEM_MC_BEGIN(3, 0);
7617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7618 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7619 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
7620 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7621 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7622 IEM_MC_REF_EFLAGS(pEFlags);
7623 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7624 IEM_MC_ADVANCE_RIP_AND_FINISH();
7625 IEM_MC_END();
7626 }
7627 else
7628 {
7629 /* memory */
7630 IEM_MC_BEGIN(3, 2);
7631 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7632 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
7633 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7634 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7635
7636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7638 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7639 IEM_MC_FETCH_EFLAGS(EFlags);
7640 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7641
7642 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7643 IEM_MC_COMMIT_EFLAGS(EFlags);
7644 IEM_MC_ADVANCE_RIP_AND_FINISH();
7645 IEM_MC_END();
7646 }
7647}
7648
7649
7650
7651/**
7652 * @opcode 0xd1
7653 */
7654FNIEMOP_DEF(iemOp_Grp2_Ev_1)
7655{
7656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7657 PCIEMOPSHIFTSIZES pImpl;
7658 switch (IEM_GET_MODRM_REG_8(bRm))
7659 {
7660 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
7661 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
7662 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
7663 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
7664 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
7665 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
7666 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
7667 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7668 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
7669 }
7670 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7671
7672 if (IEM_IS_MODRM_REG_MODE(bRm))
7673 {
7674 /* register */
7675 switch (pVCpu->iem.s.enmEffOpSize)
7676 {
7677 case IEMMODE_16BIT:
7678 IEM_MC_BEGIN(3, 0);
7679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7680 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7681 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7682 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7683 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7684 IEM_MC_REF_EFLAGS(pEFlags);
7685 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7686 IEM_MC_ADVANCE_RIP_AND_FINISH();
7687 IEM_MC_END();
7688 break;
7689
7690 case IEMMODE_32BIT:
7691 IEM_MC_BEGIN(3, 0);
7692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7693 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7694 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7695 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7696 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7697 IEM_MC_REF_EFLAGS(pEFlags);
7698 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7699 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7700 IEM_MC_ADVANCE_RIP_AND_FINISH();
7701 IEM_MC_END();
7702 break;
7703
7704 case IEMMODE_64BIT:
7705 IEM_MC_BEGIN(3, 0);
7706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7707 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7708 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7709 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7710 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7711 IEM_MC_REF_EFLAGS(pEFlags);
7712 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7713 IEM_MC_ADVANCE_RIP_AND_FINISH();
7714 IEM_MC_END();
7715 break;
7716
7717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7718 }
7719 }
7720 else
7721 {
7722 /* memory */
7723 switch (pVCpu->iem.s.enmEffOpSize)
7724 {
7725 case IEMMODE_16BIT:
7726 IEM_MC_BEGIN(3, 2);
7727 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7728 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7729 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7730 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7731
7732 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7733 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7734 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7735 IEM_MC_FETCH_EFLAGS(EFlags);
7736 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7737
7738 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7739 IEM_MC_COMMIT_EFLAGS(EFlags);
7740 IEM_MC_ADVANCE_RIP_AND_FINISH();
7741 IEM_MC_END();
7742 break;
7743
7744 case IEMMODE_32BIT:
7745 IEM_MC_BEGIN(3, 2);
7746 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7747 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7748 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7750
7751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7753 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7754 IEM_MC_FETCH_EFLAGS(EFlags);
7755 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7756
7757 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7758 IEM_MC_COMMIT_EFLAGS(EFlags);
7759 IEM_MC_ADVANCE_RIP_AND_FINISH();
7760 IEM_MC_END();
7761 break;
7762
7763 case IEMMODE_64BIT:
7764 IEM_MC_BEGIN(3, 2);
7765 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7766 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7767 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7768 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7769
7770 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7772 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7773 IEM_MC_FETCH_EFLAGS(EFlags);
7774 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7775
7776 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7777 IEM_MC_COMMIT_EFLAGS(EFlags);
7778 IEM_MC_ADVANCE_RIP_AND_FINISH();
7779 IEM_MC_END();
7780 break;
7781
7782 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7783 }
7784 }
7785}
7786
7787
7788/**
7789 * @opcode 0xd2
7790 */
7791FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
7792{
7793 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7794 PCIEMOPSHIFTSIZES pImpl;
7795 switch (IEM_GET_MODRM_REG_8(bRm))
7796 {
7797 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
7798 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
7799 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
7800 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
7801 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
7802 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
7803 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
7804 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
7805 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
7806 }
7807 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7808
7809 if (IEM_IS_MODRM_REG_MODE(bRm))
7810 {
7811 /* register */
7812 IEM_MC_BEGIN(3, 0);
7813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7814 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7815 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7816 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7817 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7818 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7819 IEM_MC_REF_EFLAGS(pEFlags);
7820 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7821 IEM_MC_ADVANCE_RIP_AND_FINISH();
7822 IEM_MC_END();
7823 }
7824 else
7825 {
7826 /* memory */
7827 IEM_MC_BEGIN(3, 2);
7828 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7829 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7830 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7831 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7832
7833 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7835 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7836 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7837 IEM_MC_FETCH_EFLAGS(EFlags);
7838 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7839
7840 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7841 IEM_MC_COMMIT_EFLAGS(EFlags);
7842 IEM_MC_ADVANCE_RIP_AND_FINISH();
7843 IEM_MC_END();
7844 }
7845}
7846
7847
7848/**
7849 * @opcode 0xd3
7850 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /* Group 2 shift/rotate with a word/dword/qword destination and the shift
       count taken from CL.  The ModR/M reg field selects the operation. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are left undefined by these operations for verification purposes. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register destination: reference the GREG directly and operate in place. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the upper half in 64-bit mode. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory destination: map the operand read/write, shift, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count = CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7989
7990/**
7991 * @opcode 0xd4
7992 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAM is invalid in 64-bit mode. */
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET(); /* the immediate is the divisor; zero raises #DE. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
}
8003
8004
8005/**
8006 * @opcode 0xd5
8007 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* AAD is invalid in 64-bit mode. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
}
8016
8017
8018/**
8019 * @opcode 0xd6
8020 */
FNIEMOP_DEF(iemOp_salc)
{
    /* Undocumented instruction: AL = CF ? 0xff : 0x00; no flags modified. */
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT(); /* invalid in 64-bit mode. */

    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8036
8037
8038/**
8039 * @opcode 0xd7
8040 */
FNIEMOP_DEF(iemOp_xlat)
{
    /* Table lookup: AL = mem8[iEffSeg:(r/e)BX + zero-extended AL].
       One variant per effective address size. */
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* index = AL, zero extended */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* index = AL, zero extended */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* index = AL, zero extended */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8088
8089
8090/**
8091 * Common worker for FPU instructions working on ST0 and STn, and storing the
8092 * result in ST0.
8093 *
8094 * @param bRm Mod R/M byte.
8095 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8096 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Operate only when both ST0 and STn hold values; otherwise record a stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* result goes to ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8119
8120
8121/**
8122 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8123 * flags.
8124 *
8125 * @param bRm Mod R/M byte.
8126 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8127 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* No register is written here; only the FSW produced by the worker is merged in. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode); /* UINT8_MAX = no destination register */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8150
8151
8152/**
8153 * Common worker for FPU instructions working on ST0 and STn, only affecting
8154 * flags, and popping when done.
8155 *
8156 * @param bRm Mod R/M byte.
8157 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8158 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Same as iemOpHlpFpuNoStore_st0_stN, but the stack is popped afterwards
       (both on the normal path and on underflow). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode); /* UINT8_MAX = no destination register */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8181
8182
8183/** Opcode 0xd8 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    /* st0 += stN via the common ST0,STn worker and the assembly add implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8189
8190
8191/** Opcode 0xd8 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    /* st0 *= stN via the common ST0,STn worker and the assembly multiply implementation. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8197
8198
8199/** Opcode 0xd8 11/2. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    /* Compare st0 with stN; only FSW is updated, no register is written. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8205
8206
8207/** Opcode 0xd8 11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    /* Same comparison worker as fcom, but the _pop helper pops the stack afterwards. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8213
8214
8215/** Opcode 0xd8 11/4. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    /* st0 -= stN via the common ST0,STn worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8221
8222
8223/** Opcode 0xd8 11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    /* Reverse subtract (operands swapped relative to fsub) via the common ST0,STn worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8229
8230
8231/** Opcode 0xd8 11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    /* st0 /= stN via the common ST0,STn worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8237
8238
8239/** Opcode 0xd8 11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    /* Reverse divide (operands swapped relative to fdiv) via the common ST0,STn worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8245
8246
8247/**
8248 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8249 * the result in ST0.
8250 *
8251 * @param bRm Mod R/M byte.
8252 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8253 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the 32-bit real operand from memory before touching the FPU state. */
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* result goes to ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8282
8283
8284/** Opcode 0xd8 !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    /* st0 += m32real via the common ST0,m32r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8290
8291
8292/** Opcode 0xd8 !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    /* st0 *= m32real via the common ST0,m32r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8298
8299
8300/** Opcode 0xd8 !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    /* Compare ST0 with a 32-bit real memory operand; only FSW is updated. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8331
8332
8333/** Opcode 0xd8 !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    /* Like fcom st0,m32r but the stack is popped afterwards (_THEN_POP variants). */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8364
8365
8366/** Opcode 0xd8 !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    /* st0 -= m32real via the common ST0,m32r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
8372
8373
8374/** Opcode 0xd8 !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    /* Reverse subtract with a m32real source via the common ST0,m32r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
8380
8381
8382/** Opcode 0xd8 !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    /* st0 /= m32real via the common ST0,m32r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
8388
8389
8390/** Opcode 0xd8 !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    /* Reverse divide with a m32real source via the common ST0,m32r worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8396
8397
8398/**
8399 * @opcode 0xd8
8400 */
FNIEMOP_DEF(iemOp_EscF0)
{
    /* x87 escape opcode 0xd8: dispatch on the ModR/M reg field, with separate
       tables for the register form (11b mod) and the memory form (m32real). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7); /* FOP = low 3 opcode bits + ModR/M */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8437
8438
8439/** Opcode 0xd9 /0 mem32real
8440 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    /* Convert a 32-bit real from memory to 80-bit and push it onto the FPU stack. */
    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (register 7 relative to the current top) must be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8469
8470
8471/** Opcode 0xd9 !11/2 mem32real */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    /* Store ST0 to memory as a 32-bit real (no pop). */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw); /* commit depends on FSW (unmasked exceptions) */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, write the negative QNaN indefinite value; then record underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8504
8505
8506/** Opcode 0xd9 !11/3 */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    /* Store ST0 to memory as a 32-bit real, then pop the stack (_THEN_POP variants). */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw); /* commit depends on FSW (unmasked exceptions) */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, write the negative QNaN indefinite value; then record underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8539
8540
8541/** Opcode 0xd9 !11/4 */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    /* Load the FPU environment (14 or 28 bytes depending on operand size);
       the heavy lifting is done by the C implementation. */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
8557
8558
8559/** Opcode 0xd9 !11/5 */
8560FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
8561{
8562 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
8563 IEM_MC_BEGIN(1, 1);
8564 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8565 IEM_MC_ARG(uint16_t, u16Fsw, 0);
8566 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8568 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8569 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8570 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8571 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
8572 IEM_MC_END();
8573}
8574
8575
8576/** Opcode 0xd9 !11/6 */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    /* NOTE(review): the mnemonic/stats tag says "fstenv" while this decoder
       handles the no-wait fnstenv form (0xd9 /6) - confirm whether the tag
       should read "fnstenv" for consistency with the function name. */
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    /* Store the FPU environment (14 or 28 bytes depending on operand size). */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
8592
8593
8594/** Opcode 0xd9 !11/7 */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    /* Store the FPU control word to a 16-bit memory operand. */
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8610
8611
8612/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    /* FPU no-op: performs no arithmetic, but still checks for pending FPU
       exceptions and updates the FPU opcode/instruction pointer. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8627
8628
8629/** Opcode 0xd9 11/0 stN */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /* Push a copy of STn onto the FPU stack. */
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode); /* source register empty */
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8653
8654
8655/** Opcode 0xd9 11/3 stN */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /* Exchange ST0 and STn.  The underflow path (either register empty) is
       handled by a C implementation. */
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(2, 3);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2); /* old STn value; C1 is set on this path */
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1); /* STn = old ST0 */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode); /* ST0 = old STn */
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8683
8684
8685/** Opcode 0xd9 11/4, 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop without copying anything. */
        IEM_MC_BEGIN(0, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Copy ST0 into STn, then pop the stack. */
        IEM_MC_BEGIN(0, 2);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8731
8732
8733/**
8734 * Common worker for FPU instructions working on ST0 and replaces it with the
8735 * result, i.e. unary operators.
8736 *
8737 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8738 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Apply the unary operation to ST0, writing the result back to ST0;
       an empty ST0 records a stack underflow instead. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8760
8761
8762/** Opcode 0xd9 0xe0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    /* Change sign of ST0 via the common unary ST0 worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
8768
8769
8770/** Opcode 0xd9 0xe1. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    /* Absolute value of ST0 via the common unary ST0 worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
8776
8777
8778/** Opcode 0xd9 0xe4. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    /* Examine/test ST0; only FSW is updated, ST0 is not modified. */
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode); /* UINT8_MAX = no destination register */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8801
8802
8803/** Opcode 0xd9 0xe5. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    /* Classify ST0 into the FSW condition bits.  Unlike most ST0 operations
       this does not check for an empty register first - the worker is called
       unconditionally (fxam also classifies the empty case). */
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8823
8824
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack
 * (FLD1, FLDL2T, FLDL2E, FLDPI, FLDLG2, FLDLN2, FLDZ).
 *
 * Raises \#NM when the device is not available and any pending FPU
 * exceptions before doing anything else.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly)
 *                      producing the constant.
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that will become the new top after the push;
       if it is occupied the push would overflow the stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8850
8851
/** Opcode 0xd9 0xe8 - FLD1: push +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
8858
8859
/** Opcode 0xd9 0xe9 - FLDL2T: push log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
8866
8867
/** Opcode 0xd9 0xea - FLDL2E: push log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
8874
/** Opcode 0xd9 0xeb - FLDPI: push pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
8881
8882
/** Opcode 0xd9 0xec - FLDLG2: push log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
8889
/** Opcode 0xd9 0xed - FLDLN2: push ln(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
8896
8897
/** Opcode 0xd9 0xee - FLDZ: push +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
8904
8905
/** Opcode 0xd9 0xf0 - F2XM1: ST(0) = 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
8919
8920
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Used by FYL2X, FPATAN, FYL2XP1 and friends (STn = op(STn, ST0); pop).
 *
 * @param   bRm         Mod R/M byte (the rm field selects STn).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Operand 1 is STn (destination), operand 2 is ST(0); both must be
       non-empty or we take the underflow path. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        /* Store-then-pop honours the "don't pop on unmasked IE/DE/ZE" rule. */
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8951
8952
/** Opcode 0xd9 0xf1 - FYL2X: ST(1) = ST(1) * log2(ST(0)); pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
8959
8960
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack (FPTAN, FXTRACT, FSINCOS).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly)
 *                      producing the two-value result.
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        /* Replaces ST(0) with the first value and pushes the second. */
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8987}
8988
8989
/** Opcode 0xd9 0xf2 - FPTAN: ST(0) = tan(ST(0)); push +1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
8996
8997
/** Opcode 0xd9 0xf3 - FPATAN: ST(1) = atan(ST(1)/ST(0)); pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
9004
9005
/** Opcode 0xd9 0xf4 - FXTRACT: split ST(0) into exponent and significand,
 *  replacing ST(0) and pushing the second value. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
9012
9013
/** Opcode 0xd9 0xf5 - FPREM1: IEEE partial remainder, ST(0) by ST(1). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
9020
9021
/** Opcode 0xd9 0xf6 - FDECSTP: decrement the FPU stack top pointer (no
 *  register content or tag changes). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    /* FSW C-bits cleared (const 0), FOP recorded. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9042
9043
/** Opcode 0xd9 0xf7 - FINCSTP: increment the FPU stack top pointer (no
 *  register content or tag changes). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* FSW C-bits cleared (const 0), FOP recorded. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9064
9065
/** Opcode 0xd9 0xf8 - FPREM: (truncating) partial remainder, ST(0) by ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
9072
9073
/** Opcode 0xd9 0xf9 - FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1); pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
9080
9081
/** Opcode 0xd9 0xfa - FSQRT: ST(0) = sqrt(ST(0)). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9088
9089
/** Opcode 0xd9 0xfb - FSINCOS: ST(0) = sin(ST(0)); push cos of the old ST(0). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9096
9097
/** Opcode 0xd9 0xfc - FRNDINT: round ST(0) to integer per the FCW rounding
 *  mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9104
9105
/** Opcode 0xd9 0xfd - FSCALE: ST(0) = ST(0) * 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9112
9113
/** Opcode 0xd9 0xfe - FSIN: ST(0) = sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9120
9121
/** Opcode 0xd9 0xff - FCOS: ST(0) = cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9128
9129
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form 0xd9 encodings with bRm in the
 * 0xe0..0xff range (table index = bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
9166
9167
/**
 * @opcode  0xd9
 *
 * x87 escape opcode 0xd9: decodes the ModR/M byte and dispatches to the
 * individual FLD/FST(P)/FLDENV/FLDCW/FNSTENV/FNSTCW/transcendental handlers.
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte in the high
       half, the ModR/M byte in the low half. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                /* 0xd0 is the sole FNOP encoding; the rest of /2 is invalid. */
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* /4../7 with mod=3 covers bRm 0xe0..0xff - table dispatch. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9212
9213
/** Opcode 0xda 11/0 - FCMOVB: copy ST(i) to ST(0) if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be non-empty, even when the move is not
       taken; otherwise stack underflow is signalled. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated regardless of whether the move happened. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9238
9239
/** Opcode 0xda 11/1 - FCMOVE: copy ST(i) to ST(0) if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be non-empty even when the move is not taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9264
9265
/** Opcode 0xda 11/2 - FCMOVBE: copy ST(i) to ST(0) if CF or ZF is set
 *  (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be non-empty even when the move is not taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9290
9291
/** Opcode 0xda 11/3 - FCMOVU: copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be non-empty even when the move is not taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* The 'unordered' condition maps to PF per the CMOVcc encoding. */
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9316
9317
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags, and popping twice when done (e.g. FUCOMPP, FCOMPP).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly)
 *                      returning the new FSW.
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Compares ST(0) with ST(1); only the status word is written back. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9347
9348
/** Opcode 0xda 0xe9 - FUCOMPP: unordered compare ST(0) with ST(1); pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9355
9356
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0 (FIADD, FIMUL, FISUB, FISUBR, FIDIV, FIDIVR with a
 * 32-bit integer memory operand).
 *
 * The memory operand is fetched before the FPU usage is prepared, so a
 * memory fault is delivered ahead of any FPU state changes.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9392
9393
/** Opcode 0xda !11/0 - FIADD m32i: ST(0) = ST(0) + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
9400
9401
/** Opcode 0xda !11/1 - FIMUL m32i: ST(0) = ST(0) * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
9408
9409
/** Opcode 0xda !11/2 - FICOM m32i: compare ST(0) with a 32-bit integer
 *  memory operand, FSW condition codes only (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch before PREPARE_FPU_USAGE so a #PF precedes FPU state changes. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory-operand variant also records FDP/FDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9441
9442
/** Opcode 0xda !11/3 - FICOMP m32i: compare ST(0) with a 32-bit integer
 *  memory operand, FSW condition codes only; pop ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch before PREPARE_FPU_USAGE so a #PF precedes FPU state changes. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same comparison helper as FICOM; only the pop differs. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9474
9475
/** Opcode 0xda !11/4 - FISUB m32i: ST(0) = ST(0) - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
9482
9483
/** Opcode 0xda !11/5 - FISUBR m32i: ST(0) = m32i - ST(0) (reversed). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
9490
9491
/** Opcode 0xda !11/6 - FIDIV m32i: ST(0) = ST(0) / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
9498
9499
/** Opcode 0xda !11/7 - FIDIVR m32i: ST(0) = m32i / ST(0) (reversed). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
9506
9507
/**
 * @opcode  0xda
 *
 * x87 escape opcode 0xda: register forms are the FCMOVcc group (plus
 * FUCOMPP at 0xe9), memory forms are the 32-bit integer arithmetic group.
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP): low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                /* 0xe9 is the sole valid /5 register encoding (FUCOMPP). */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9549
9550
/** Opcode 0xdb !11/0 - FILD m32i: convert a 32-bit signed integer and push
 *  it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch before PREPARE_FPU_USAGE so a #PF precedes FPU state changes. */
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push, else it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9581
9582
/** Opcode 0xdb !11/1 - FISTTP m32i (SSE3): store ST(0) to memory as a
 *  32-bit integer using truncation (chop) regardless of FCW.RC; pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults are raised
       before any FPU state is modified. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW (no store on unmasked IE). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9616
9617
/** Opcode 0xdb !11/2 - FIST m32i: store ST(0) to memory as a 32-bit integer
 *  rounded per FCW.RC; no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults are raised
       before any FPU state is modified. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW (no store on unmasked IE). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9651
9652
/** Opcode 0xdb !11/3 - FISTP m32i: store ST(0) to memory as a 32-bit
 *  integer rounded per FCW.RC; pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so access faults are raised
       before any FPU state is modified. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Same conversion helper as FIST; only the pop differs. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9686
9687
/** Opcode 0xdb !11/5 - FLD m80r: push an 80-bit real from memory onto the
 *  FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch before PREPARE_FPU_USAGE so a #PF precedes FPU state changes. */
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push, else it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9718
9719
/** Opcode 0xdb !11/7 - FSTP m80r: store ST(0) to memory as an 80-bit real;
 *  pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte store; the 7-byte alignment mask matches the IEM handling of
       this odd-size access (see IEM_MC_MEM_MAP_EX usage). */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        /* Commit is conditional on the FSW (no store on unmasked IE). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the real indefinite QNaN. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9753
9754
/** Opcode 0xdb 11/0 - FCMOVNB: copy ST(i) to ST(0) if CF is clear
 *  (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be non-empty even when the move is not taken. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9779
9780
/** Opcode 0xdb 11/1.
 * FCMOVNE - copy ST(i) to ST(0) if EFLAGS.ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9805
9806
/** Opcode 0xdb 11/2.
 * FCMOVNBE - copy ST(i) to ST(0) if both EFLAGS.CF and EFLAGS.ZF are clear
 * (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9831
9832
/** Opcode 0xdb 11/3.
 * FCMOVNU - copy ST(i) to ST(0) if EFLAGS.PF is clear (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9857
9858
/** Opcode 0xdb 0xe0.
 * FNENI - enable interrupts on the 8087; a no-op on later FPUs, so only the
 * device-not-available check is emulated. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9869
9870
/** Opcode 0xdb 0xe1.
 * FNDISI - disable interrupts on the 8087; ignored on later FPUs, so only the
 * device-not-available check is emulated. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9881
9882
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception flags (FSW) without checking for pending
 * unmasked exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9895
9896
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU; deferred to the C implementation with
 * exception checking disabled (the no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
9904
9905
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287-only "set protected mode"; ignored on later FPUs, so only
 * the device-not-available check is emulated. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9916
9917
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL-only "reset protected mode"; raises \#UD here since newer
 * CPUs treat this encoding as invalid (the ignore variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0,0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
9933
9934
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare ST(0) with ST(i), setting EFLAGS.
 * The fPop flag (0 = no pop) is OR'ed into the low bits of the third
 * argument together with the current FPU opcode. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
9943
9944
9945/** Opcode 0xdb 11/6. */
9946FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
9947{
9948 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
9949 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
9950 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
9951 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
9952}
9953
9954
9955/**
9956 * @opcode 0xdb
9957 */
9958FNIEMOP_DEF(iemOp_EscF3)
9959{
9960 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9961 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
9962 if (IEM_IS_MODRM_REG_MODE(bRm))
9963 {
9964 switch (IEM_GET_MODRM_REG_8(bRm))
9965 {
9966 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
9967 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
9968 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
9969 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
9970 case 4:
9971 switch (bRm)
9972 {
9973 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
9974 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
9975 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
9976 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
9977 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
9978 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
9979 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
9980 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
9981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9982 }
9983 break;
9984 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
9985 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
9986 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
9987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9988 }
9989 }
9990 else
9991 {
9992 switch (IEM_GET_MODRM_REG_8(bRm))
9993 {
9994 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9995 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9996 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9997 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9998 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
9999 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
10000 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10001 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
10002 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10003 }
10004 }
10005}
10006
10007
10008/**
10009 * Common worker for FPU instructions working on STn and ST0, and storing the
10010 * result in STn unless IE, DE or ZE was raised.
10011 *
10012 * @param bRm Mod R/M byte.
10013 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10014 */
10015FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10016{
10017 IEM_MC_BEGIN(3, 1);
10018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10019 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10020 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10021 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10022 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10023
10024 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10025 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10026
10027 IEM_MC_PREPARE_FPU_USAGE();
10028 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10029 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10030 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10031 } IEM_MC_ELSE() {
10032 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10033 } IEM_MC_ENDIF();
10034 IEM_MC_ADVANCE_RIP_AND_FINISH();
10035
10036 IEM_MC_END();
10037}
10038
10039
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - add ST(0) to ST(i), storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10046
10047
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - multiply ST(i) by ST(0), storing the result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10054
10055
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reversed subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10062
10063
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - subtract ST(0) from ST(i), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10070
10071
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reversed divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10078
10079
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - divide ST(i) by ST(0), result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10086
10087
10088/**
10089 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10090 * memory operand, and storing the result in ST0.
10091 *
10092 * @param bRm Mod R/M byte.
10093 * @param pfnImpl Pointer to the instruction implementation (assembly).
10094 */
10095FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10096{
10097 IEM_MC_BEGIN(3, 3);
10098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10099 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10100 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10101 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10102 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10103 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10104
10105 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10107 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10108 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10109
10110 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10111 IEM_MC_PREPARE_FPU_USAGE();
10112 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10113 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10114 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10115 } IEM_MC_ELSE() {
10116 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
10117 } IEM_MC_ENDIF();
10118 IEM_MC_ADVANCE_RIP_AND_FINISH();
10119
10120 IEM_MC_END();
10121}
10122
10123
/** Opcode 0xdc !11/0.
 * FADD m64real - add a 64-bit float memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10130
10131
/** Opcode 0xdc !11/1.
 * FMUL m64real - multiply ST(0) by a 64-bit float memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10138
10139
/** Opcode 0xdc !11/2.
 * FCOM m64real - compare ST(0) with a 64-bit float memory operand, setting
 * the FSW condition codes; no stack pop. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10171
10172
/** Opcode 0xdc !11/3.
 * FCOMP m64real - compare ST(0) with a 64-bit float memory operand, setting
 * the FSW condition codes, then pop the stack (the _THEN_POP updates). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10204
10205
/** Opcode 0xdc !11/4.
 * FSUB m64real - subtract a 64-bit float memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10212
10213
/** Opcode 0xdc !11/5.
 * FSUBR m64real - reversed subtract: ST(0) = m64 - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10220
10221
/** Opcode 0xdc !11/6.
 * FDIV m64real - divide ST(0) by a 64-bit float memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10228
10229
/** Opcode 0xdc !11/7.
 * FDIVR m64real - reversed divide: ST(0) = m64 / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10236
10237
10238/**
10239 * @opcode 0xdc
10240 */
10241FNIEMOP_DEF(iemOp_EscF4)
10242{
10243 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10244 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10245 if (IEM_IS_MODRM_REG_MODE(bRm))
10246 {
10247 switch (IEM_GET_MODRM_REG_8(bRm))
10248 {
10249 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10250 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10251 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10252 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10253 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10254 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10255 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10256 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10257 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10258 }
10259 }
10260 else
10261 {
10262 switch (IEM_GET_MODRM_REG_8(bRm))
10263 {
10264 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10265 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10266 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10267 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10268 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10269 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10270 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10271 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10273 }
10274 }
10275}
10276
10277
/** Opcode 0xdd !11/0.
 * FLD m64real - convert a 64-bit float memory operand to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes ST(0) after the push; it must be
       empty or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10308
10309
/** Opcode 0xdd !11/1.
 * FISTTP m64int - store ST(0) to a 64-bit integer with truncation, then pop.
 * (SSE3 instruction; the original comment's /0 looks like a copy/paste of the
 * neighbouring headers - the dispatcher routes /1 here.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store the integer indefinite (INT64_MIN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10343
10344
/** Opcode 0xdd !11/2.
 * FST m64real - store ST(0) to a 64-bit float in memory; no pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store a negative QNaN indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10378
10379
10380
10381
/** Opcode 0xdd !11/3.
 * FSTP m64real - store ST(0) to a 64-bit float in memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked, store a negative QNaN indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10415
10416
/** Opcode 0xdd !11/4.
 * FRSTOR - restore the full FPU state (94/108 bytes depending on operand
 * size) from memory; deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10433
10434
/** Opcode 0xdd !11/6.
 * FNSAVE - save the full FPU state (94/108 bytes depending on operand size)
 * to memory and reinitialize the FPU; deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10451
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to a 16-bit memory location. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    /* Read-only access: the status word is fetched and written out verbatim. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
10475
10476
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark register ST(i) as empty in the tag word without
 * changing the stack top or the register contents. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10496
10497
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) into ST(i); no pop.  (The original "11/1" in the
 * old header disagrees with the dispatcher, which routes /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST(0) value in a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10520
10521
/** Opcode 0xdd 11/4.
 * FUCOM ST(i) - unordered compare of ST(0) with ST(i); no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
10528
10529
/** Opcode 0xdd 11/5.
 * FUCOMP ST(i) - unordered compare of ST(0) with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
10536
10537
10538/**
10539 * @opcode 0xdd
10540 */
10541FNIEMOP_DEF(iemOp_EscF5)
10542{
10543 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10544 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
10545 if (IEM_IS_MODRM_REG_MODE(bRm))
10546 {
10547 switch (IEM_GET_MODRM_REG_8(bRm))
10548 {
10549 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
10550 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
10551 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
10552 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
10553 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
10554 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
10555 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10556 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10558 }
10559 }
10560 else
10561 {
10562 switch (IEM_GET_MODRM_REG_8(bRm))
10563 {
10564 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
10565 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
10566 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
10567 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
10568 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
10569 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
10570 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
10571 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
10572 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10573 }
10574 }
10575}
10576
10577
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add ST(0) to ST(i), store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
10584
10585
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply ST(i) by ST(0), store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
10592
10593
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
10600
10601
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reversed subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
10608
10609
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract ST(0) from ST(i), store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
10616
10617
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reversed divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
10624
10625
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide ST(i) by ST(0), store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
10632
10633
10634/**
10635 * Common worker for FPU instructions working on ST0 and an m16i, and storing
10636 * the result in ST0.
10637 *
10638 * @param bRm Mod R/M byte.
10639 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10640 */
10641FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
10642{
10643 IEM_MC_BEGIN(3, 3);
10644 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10645 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10646 IEM_MC_LOCAL(int16_t, i16Val2);
10647 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10648 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10649 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
10650
10651 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10653
10654 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10655 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10656 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10657
10658 IEM_MC_PREPARE_FPU_USAGE();
10659 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10660 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
10661 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10662 } IEM_MC_ELSE() {
10663 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10664 } IEM_MC_ENDIF();
10665 IEM_MC_ADVANCE_RIP_AND_FINISH();
10666
10667 IEM_MC_END();
10668}
10669
10670
/** Opcode 0xde !11/0.
 * FIADD m16int - add a 16-bit integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
10677
10678
/** Opcode 0xde !11/1.
 * FIMUL m16int - multiply ST(0) by a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
10685
10686
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST0 with (int16_t)[mem], setting C0/C2/C3 in FSW.
 * Does not use the common worker since only FSW is updated, not ST0. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* Memory-operand variant records FOP/FIP/FDP for the data pointer. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10718
10719
/** Opcode 0xde !11/3.
 * FICOMP m16int: like FICOM m16int but pops the register stack afterwards.
 * Shares the iemAImpl_ficom_r80_by_i16 worker; only the FSW-update and
 * underflow macros differ (the _THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10751
10752
/** Opcode 0xde !11/4.
 * FISUB m16int: ST0 = ST0 - (int16_t)[mem], via the common m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
10759
10760
/** Opcode 0xde !11/5.
 * FISUBR m16int: ST0 = (int16_t)[mem] - ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
10767
10768
/** Opcode 0xde !11/6.
 * FIDIV m16int: ST0 = ST0 / (int16_t)[mem], via the common m16i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
10775
10776
/** Opcode 0xde !11/7.
 * FIDIVR m16int: ST0 = (int16_t)[mem] / ST0 (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
10783
10784
10785/**
10786 * @opcode 0xde
10787 */
10788FNIEMOP_DEF(iemOp_EscF6)
10789{
10790 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10791 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
10792 if (IEM_IS_MODRM_REG_MODE(bRm))
10793 {
10794 switch (IEM_GET_MODRM_REG_8(bRm))
10795 {
10796 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
10797 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
10798 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10799 case 3: if (bRm == 0xd9)
10800 return FNIEMOP_CALL(iemOp_fcompp);
10801 IEMOP_RAISE_INVALID_OPCODE_RET();
10802 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
10803 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
10804 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
10805 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
10806 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10807 }
10808 }
10809 else
10810 {
10811 switch (IEM_GET_MODRM_REG_8(bRm))
10812 {
10813 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
10814 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
10815 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
10816 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
10817 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
10818 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
10819 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
10820 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
10821 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10822 }
10823 }
10824}
10825
10826
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp: marks the
 * tag for ST(i) as empty and then increments the stack top pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));  /* the "ffree" part */
    IEM_MC_FPU_STACK_INC_TOP();                      /* the "fincstp" (pop) part */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10846
10847
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX without checking for pending
 * FPU exceptions (the no-wait form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();  /* read-only access; no FSW/FOP update */
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10862
10863
10864/** Opcode 0xdf 11/5. */
10865FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
10866{
10867 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
10868 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10869 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
10870 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10871}
10872
10873
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i): ordered (signaling) compare of ST0 with ST(i), setting
 * ZF/PF/CF, then pop the register stack.  Bit 31 of the last argument tells
 * the C implementation to pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10882
10883
/** Opcode 0xdf !11/0.
 * FILD m16int: convert a 16-bit signed integer from memory to R80 and push
 * it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10914
10915
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST0 to memory as a 16-bit integer using
 * truncation (chop) regardless of the rounding control, then pop.
 * On a masked invalid operation (FCW.IM set) with an empty ST0 the integer
 * indefinite value (INT16_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is conditional on the FSW (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10949
10950
/** Opcode 0xdf !11/2.
 * FIST m16int: store ST0 to memory as a 16-bit integer using the current
 * rounding control; the register stack is left unchanged (no pop).
 * On a masked invalid operation with an empty ST0 the integer indefinite
 * value (INT16_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10984
10985
/** Opcode 0xdf !11/3.
 * FISTP m16int: like FIST m16int but pops the register stack afterwards
 * (the _THEN_POP FSW/underflow macro variants). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11019
11020
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: convert an 80-bit packed BCD value from memory to R80 and
 * push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing requires ST(7) to be free, otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11051
11052
/** Opcode 0xdf !11/5.
 * FILD m64int: convert a 64-bit signed integer from memory to R80 and push
 * it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing requires ST(7) to be free, otherwise it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11083
11084
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: store ST0 to memory as an 80-bit packed BCD value, then
 * pop the register stack.  On a masked invalid operation with an empty ST0
 * the BCD indefinite encoding is stored instead. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte operand, so the extended map variant with explicit size/alignment is used. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11118
11119
/** Opcode 0xdf !11/7.
 * FISTP m64int: store ST0 to memory as a 64-bit integer using the current
 * rounding control, then pop the register stack.  On a masked invalid
 * operation with an empty ST0 the integer indefinite value (INT64_MIN) is
 * stored instead. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11153
11154
11155/**
11156 * @opcode 0xdf
11157 */
11158FNIEMOP_DEF(iemOp_EscF7)
11159{
11160 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11161 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
11162 if (IEM_IS_MODRM_REG_MODE(bRm))
11163 {
11164 switch (IEM_GET_MODRM_REG_8(bRm))
11165 {
11166 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
11167 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
11168 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11169 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11170 case 4: if (bRm == 0xe0)
11171 return FNIEMOP_CALL(iemOp_fnstsw_ax);
11172 IEMOP_RAISE_INVALID_OPCODE_RET();
11173 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
11174 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
11175 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11176 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11177 }
11178 }
11179 else
11180 {
11181 switch (IEM_GET_MODRM_REG_8(bRm))
11182 {
11183 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
11184 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
11185 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
11186 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
11187 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
11188 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
11189 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
11190 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
11191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11192 }
11193 }
11194}
11195
11196
11197/**
11198 * @opcode 0xe0
11199 */
11200FNIEMOP_DEF(iemOp_loopne_Jb)
11201{
11202 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
11203 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11204 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11205
11206 switch (pVCpu->iem.s.enmEffAddrMode)
11207 {
11208 case IEMMODE_16BIT:
11209 IEM_MC_BEGIN(0,0);
11210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11211 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11212 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11213 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11214 } IEM_MC_ELSE() {
11215 IEM_MC_ADVANCE_RIP_AND_FINISH();
11216 } IEM_MC_ENDIF();
11217 IEM_MC_END();
11218 break;
11219
11220 case IEMMODE_32BIT:
11221 IEM_MC_BEGIN(0,0);
11222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11223 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11224 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11225 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11226 } IEM_MC_ELSE() {
11227 IEM_MC_ADVANCE_RIP_AND_FINISH();
11228 } IEM_MC_ENDIF();
11229 IEM_MC_END();
11230 break;
11231
11232 case IEMMODE_64BIT:
11233 IEM_MC_BEGIN(0,0);
11234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11235 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11236 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11237 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11238 } IEM_MC_ELSE() {
11239 IEM_MC_ADVANCE_RIP_AND_FINISH();
11240 } IEM_MC_ENDIF();
11241 IEM_MC_END();
11242 break;
11243
11244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11245 }
11246}
11247
11248
11249/**
11250 * @opcode 0xe1
11251 */
11252FNIEMOP_DEF(iemOp_loope_Jb)
11253{
11254 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
11255 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11256 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11257
11258 switch (pVCpu->iem.s.enmEffAddrMode)
11259 {
11260 case IEMMODE_16BIT:
11261 IEM_MC_BEGIN(0,0);
11262 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11263 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11264 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11265 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11266 } IEM_MC_ELSE() {
11267 IEM_MC_ADVANCE_RIP_AND_FINISH();
11268 } IEM_MC_ENDIF();
11269 IEM_MC_END();
11270 break;
11271
11272 case IEMMODE_32BIT:
11273 IEM_MC_BEGIN(0,0);
11274 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11275 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11276 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11277 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11278 } IEM_MC_ELSE() {
11279 IEM_MC_ADVANCE_RIP_AND_FINISH();
11280 } IEM_MC_ENDIF();
11281 IEM_MC_END();
11282 break;
11283
11284 case IEMMODE_64BIT:
11285 IEM_MC_BEGIN(0,0);
11286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11287 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11288 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11289 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11290 } IEM_MC_ELSE() {
11291 IEM_MC_ADVANCE_RIP_AND_FINISH();
11292 } IEM_MC_ENDIF();
11293 IEM_MC_END();
11294 break;
11295
11296 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11297 }
11298}
11299
11300
11301/**
11302 * @opcode 0xe2
11303 */
11304FNIEMOP_DEF(iemOp_loop_Jb)
11305{
11306 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
11307 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11308 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11309
11310 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
11311 * using the 32-bit operand size override. How can that be restarted? See
11312 * weird pseudo code in intel manual. */
11313
11314 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
11315 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
11316 * the loop causes guest crashes, but when logging it's nice to skip a few million
11317 * lines of useless output. */
11318#if defined(LOG_ENABLED)
11319 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
11320 switch (pVCpu->iem.s.enmEffAddrMode)
11321 {
11322 case IEMMODE_16BIT:
11323 IEM_MC_BEGIN(0,0);
11324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11325 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
11326 IEM_MC_ADVANCE_RIP_AND_FINISH();
11327 IEM_MC_END();
11328 break;
11329
11330 case IEMMODE_32BIT:
11331 IEM_MC_BEGIN(0,0);
11332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11333 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
11334 IEM_MC_ADVANCE_RIP_AND_FINISH();
11335 IEM_MC_END();
11336 break;
11337
11338 case IEMMODE_64BIT:
11339 IEM_MC_BEGIN(0,0);
11340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11341 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
11342 IEM_MC_ADVANCE_RIP_AND_FINISH();
11343 IEM_MC_END();
11344 break;
11345
11346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11347 }
11348#endif
11349
11350 switch (pVCpu->iem.s.enmEffAddrMode)
11351 {
11352 case IEMMODE_16BIT:
11353 IEM_MC_BEGIN(0,0);
11354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11355 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11356 IEM_MC_IF_CX_IS_NZ() {
11357 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11358 } IEM_MC_ELSE() {
11359 IEM_MC_ADVANCE_RIP_AND_FINISH();
11360 } IEM_MC_ENDIF();
11361 IEM_MC_END();
11362 break;
11363
11364 case IEMMODE_32BIT:
11365 IEM_MC_BEGIN(0,0);
11366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11367 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11368 IEM_MC_IF_ECX_IS_NZ() {
11369 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11370 } IEM_MC_ELSE() {
11371 IEM_MC_ADVANCE_RIP_AND_FINISH();
11372 } IEM_MC_ENDIF();
11373 IEM_MC_END();
11374 break;
11375
11376 case IEMMODE_64BIT:
11377 IEM_MC_BEGIN(0,0);
11378 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11379 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11380 IEM_MC_IF_RCX_IS_NZ() {
11381 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11382 } IEM_MC_ELSE() {
11383 IEM_MC_ADVANCE_RIP_AND_FINISH();
11384 } IEM_MC_ENDIF();
11385 IEM_MC_END();
11386 break;
11387
11388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11389 }
11390}
11391
11392
11393/**
11394 * @opcode 0xe3
11395 */
11396FNIEMOP_DEF(iemOp_jecxz_Jb)
11397{
11398 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
11399 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11400 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11401
11402 switch (pVCpu->iem.s.enmEffAddrMode)
11403 {
11404 case IEMMODE_16BIT:
11405 IEM_MC_BEGIN(0,0);
11406 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11407 IEM_MC_IF_CX_IS_NZ() {
11408 IEM_MC_ADVANCE_RIP_AND_FINISH();
11409 } IEM_MC_ELSE() {
11410 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11411 } IEM_MC_ENDIF();
11412 IEM_MC_END();
11413 break;
11414
11415 case IEMMODE_32BIT:
11416 IEM_MC_BEGIN(0,0);
11417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11418 IEM_MC_IF_ECX_IS_NZ() {
11419 IEM_MC_ADVANCE_RIP_AND_FINISH();
11420 } IEM_MC_ELSE() {
11421 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11422 } IEM_MC_ENDIF();
11423 IEM_MC_END();
11424 break;
11425
11426 case IEMMODE_64BIT:
11427 IEM_MC_BEGIN(0,0);
11428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11429 IEM_MC_IF_RCX_IS_NZ() {
11430 IEM_MC_ADVANCE_RIP_AND_FINISH();
11431 } IEM_MC_ELSE() {
11432 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11433 } IEM_MC_ENDIF();
11434 IEM_MC_END();
11435 break;
11436
11437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11438 }
11439}
11440
11441
/** Opcode 0xe4.
 * IN AL,Ib: read one byte from the immediate-specified I/O port into AL.
 * Deferred to the C implementation; 0x80 in the last argument flags the
 * immediate-port form for the exit/restart logic. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11450
11451
/** Opcode 0xe5.
 * IN eAX,Ib: read a word or dword (per effective operand size) from the
 * immediate-specified I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11461
11462
/** Opcode 0xe6.
 * OUT Ib,AL: write AL to the immediate-specified I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11471
11472
/** Opcode 0xe7.
 * OUT Ib,eAX: write AX/EAX (per effective operand size) to the
 * immediate-specified I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11482
11483
11484/**
11485 * @opcode 0xe8
11486 */
11487FNIEMOP_DEF(iemOp_call_Jv)
11488{
11489 IEMOP_MNEMONIC(call_Jv, "call Jv");
11490 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11491 switch (pVCpu->iem.s.enmEffOpSize)
11492 {
11493 case IEMMODE_16BIT:
11494 {
11495 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11496 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_16, (int16_t)u16Imm);
11497 }
11498
11499 case IEMMODE_32BIT:
11500 {
11501 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11502 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_32, (int32_t)u32Imm);
11503 }
11504
11505 case IEMMODE_64BIT:
11506 {
11507 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
11508 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE, iemCImpl_call_rel_64, u64Imm);
11509 }
11510
11511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11512 }
11513}
11514
11515
11516/**
11517 * @opcode 0xe9
11518 */
11519FNIEMOP_DEF(iemOp_jmp_Jv)
11520{
11521 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
11522 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11523 switch (pVCpu->iem.s.enmEffOpSize)
11524 {
11525 case IEMMODE_16BIT:
11526 {
11527 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
11528 IEM_MC_BEGIN(0, 0);
11529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11530 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
11531 IEM_MC_END();
11532 break;
11533 }
11534
11535 case IEMMODE_64BIT:
11536 case IEMMODE_32BIT:
11537 {
11538 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
11539 IEM_MC_BEGIN(0, 0);
11540 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11541 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
11542 IEM_MC_END();
11543 break;
11544 }
11545
11546 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11547 }
11548}
11549
11550
11551/**
11552 * @opcode 0xea
11553 */
11554FNIEMOP_DEF(iemOp_jmp_Ap)
11555{
11556 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
11557 IEMOP_HLP_NO_64BIT();
11558
11559 /* Decode the far pointer address and pass it on to the far call C implementation. */
11560 uint32_t off32Seg;
11561 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
11562 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
11563 else
11564 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
11565 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
11566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11567 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
11568 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
11569 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
11570}
11571
11572
11573/**
11574 * @opcode 0xeb
11575 */
11576FNIEMOP_DEF(iemOp_jmp_Jb)
11577{
11578 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
11579 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11580 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11581
11582 IEM_MC_BEGIN(0, 0);
11583 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11584 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11585 IEM_MC_END();
11586}
11587
11588
/** Opcode 0xec.
 * IN AL,DX: read one byte from the I/O port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11596
11597
/** Opcode 0xed.
 * IN eAX,DX: read a word or dword (per effective operand size) from the I/O
 * port in DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11607
11608
/** Opcode 0xee - out DX,AL: write the byte in AL to the I/O port in DX.
 *  Deferred to iemCImpl_out_DX_eAX with cbReg=1; may cause a VM-exit. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11616
11617
/** Opcode 0xef - out DX,eAX: write AX or EAX (per effective operand size) to
 *  the I/O port in DX.  May cause a VM-exit. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11627
11628
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK (unless the execution mode says to
 * disregard lock prefixes) and continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Prefixes don't terminate decoding; dispatch the following opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11641
11642
/**
 * @opcode 0xf1
 *
 * int1 / icebp - raises \#DB via the common software-interrupt C
 * implementation (iemCImpl_int with X86_XCPT_DB / IEMINT_INT1).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
11658
11659
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: replaces any previously seen REPE prefix, then
 * continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11677
11678
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: replaces any previously seen REPNE prefix, then
 * continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11696
11697
/**
 * @opcode 0xf4
 *
 * hlt - deferred to the C implementation; ends the current translation block
 * (IEM_CIMPL_F_END_TB) and may cause a VM-exit.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
}
11707
11708
/**
 * @opcode 0xf5
 *
 * cmc - complement (toggle) the carry flag.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11721
11722
/**
 * Body of 'inc/dec/not/neg Eb'.
 *
 * Register operands call @a a_fnNormalU8 directly; memory operands map the
 * byte read/write and pick @a a_fnNormalU8 or @a a_fnLockedU8 depending on
 * whether a LOCK prefix is present.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,     0); \
        IEM_MC_ARG(uint32_t *,      pEFlags,    1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* Same as above, but using the locked (atomic) worker. */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
11781
11782
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles register operands and unlocked memory operands for all three
 * operand sizes.  NOTE: deliberately ends inside an open else-branch for the
 * LOCK-prefixed memory case; it must be followed immediately by
 * IEMOP_BODY_UNARY_Ev_LOCKED, which supplies that branch and closes the
 * braces.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
11903
/**
 * LOCK-prefixed tail of 'inc/dec/not/neg Ev'.
 *
 * Must directly follow IEMOP_BODY_UNARY_Ev: it fills in the else-branch that
 * macro leaves open (locked memory access, all three operand sizes) and
 * closes the braces opened there.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
11966
11967
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * test Eb,Ib - AND the operand with an 8-bit immediate, setting flags only
 * (the destination is never written back).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access - read-only mapping since test doesn't write the operand. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to be fetched. */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
12016
12017
/** Opcode 0xf6 /4, /5, /6 and /7 (mul/imul/div/idiv Eb).
 *
 * Common worker: runs @a pfnU8 on AX and the 8-bit operand; a non-zero return
 * from the assembly worker signals a \#DE (divide error).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12068
12069
/** Opcode 0xf7 /4, /5, /6 and /7 (mul/imul/div/idiv Ev).
 *
 * Common worker: runs the size-matched function from @a pImpl on the
 * xAX:xDX pair and the operand; a non-zero return from the assembly worker
 * signals a \#DE (divide error).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes in 64-bit mode zero the high dwords of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes in 64-bit mode zero the high dwords of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12252
12253
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * not Eb - one's complement of an 8-bit operand (flags untouched by the worker).
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12263
12264
12265/**
12266 * @opmaps grp3_f6
12267 * @opcode /3
12268 */
12269FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12270{
12271 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12272 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12273}
12274
12275
/**
 * @opcode 0xf6
 *
 * Group 3 dispatcher for byte operands: the ModR/M reg field selects
 * test (/0, /1), not (/2), neg (/3), mul (/4), imul (/5), div (/6), idiv (/7).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm); /* /1 is an alias of /0 (test). */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb,  bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb,  bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12307
12308
/** Opcode 0xf7 /0 - test Ev,Iv.
 *
 * ANDs the operand with an immediate, setting flags only; the destination is
 * never written back.  The 64-bit form uses a sign-extended 32-bit immediate.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access - read-only mapping since test doesn't write the operand. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate word still to be fetched. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate dword still to be fetched. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = sign-extended imm32 still to be fetched. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12450
12451
/** Opcode 0xf7 /2 - not Ev: one's complement of a word/dword/qword operand.
 *  The two macros together form one if/else; see IEMOP_BODY_UNARY_Ev. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
12459
12460
/** Opcode 0xf7 /3 - neg Ev: two's complement negation of a word/dword/qword
 *  operand.  The two macros together form one if/else; see IEMOP_BODY_UNARY_Ev. */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
12468
12469
/**
 * @opcode 0xf7
 *
 * Group 3 dispatcher for word/dword/qword operands: the ModR/M reg field
 * selects test (/0, /1), not (/2), neg (/3), mul (/4), imul (/5), div (/6),
 * idiv (/7).
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm); /* /1 is an alias of /0 (test). */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev,  bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev,  bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12501
12502
/**
 * @opcode 0xf8
 *
 * clc - clear the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12515
12516
/**
 * @opcode 0xf9
 *
 * stc - set the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12529
12530
/**
 * @opcode 0xfa
 *
 * cli - deferred to the C implementation (privilege/IOPL checks live there);
 * touches RFLAGS and may cause a VM-exit.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_cli);
}
12540
12541
/**
 * @opcode 0xfb
 *
 * sti - deferred to the C implementation; IEM_CIMPL_F_CHECK_IRQ_DELAYED
 * requests an IRQ check after the interrupt-shadow instruction completes.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_DELAYED | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
12548
12549
/**
 * @opcode 0xfc
 *
 * cld - clear the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12562
12563
/**
 * @opcode 0xfd
 *
 * std - set the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12576
12577
/**
 * @opmaps grp4
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    /* INC r/m8 - shared unary-Eb body, with a locked variant for the
       memory form under a LOCK prefix. */
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
12587
12588
/**
 * @opmaps grp4
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    /* DEC r/m8 - shared unary-Eb body, with a locked variant for the
       memory form under a LOCK prefix. */
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
12598
12599
/**
 * @opcode 0xfe
 *
 * Group 4: only /0 (inc Eb) and /1 (dec Eb) are defined; the remaining
 * reg-field encodings raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
12616
/** Opcode 0xff /0. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    /* INC r/m16/32/64.  The two body macros together cover the plain and
       LOCK-prefixed (memory) encodings for all three operand sizes. */
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
12624
12625
/** Opcode 0xff /1. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    /* DEC r/m16/32/64.  The two body macros together cover the plain and
       LOCK-prefixed (memory) encodings for all three operand sizes. */
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
12633
12634
/**
 * Opcode 0xff /2.
 *
 * CALL near indirect: the new RIP comes from a register or memory operand;
 * the push of the return address and the branch itself are done by the
 * iemCImpl_call_16/32/64 workers.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    /* 64-bit mode defaults to 64-bit operand size; Intel CPUs ignore the
       operand-size prefix here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12721
/**
 * Common body for the group 5 far branch encodings (0xff /3 callf and
 * 0xff /5 jmpf): loads a far pointer (offset followed by a 16-bit selector)
 * from memory and defers to @a a_fnCImpl.
 *
 * Register operands are invalid and raise \#UD.  In 64-bit mode the default
 * operand size is 32-bit; only Intel CPUs respect a REX.W prefix (the 64-bit
 * case asserts the guest is not AMD).
 *
 * @param a_bRm      The ModR/M byte (must be a memory mode).
 * @param a_fnCImpl  C implementation doing the far branch
 *                   (iemCImpl_callf or iemCImpl_FarJmp).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); /* selector follows the offset */ \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); /* selector follows the offset */ \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); /* selector follows the offset */ \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
12789
12790
/**
 * Opcode 0xff /3.
 *
 * CALL far indirect - far pointer loaded from memory, handled by
 * iemCImpl_callf via the shared group 5 far body.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
}
12800
12801
/**
 * Opcode 0xff /4.
 *
 * JMP near indirect: the new RIP comes from a register or memory operand
 * and is installed directly via the SET_RIP micro-ops (no CIMPL needed).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    /* 64-bit mode defaults to 64-bit operand size; Intel CPUs ignore the
       operand-size prefix here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12888
12889
/**
 * Opcode 0xff /5.
 *
 * JMP far indirect - far pointer loaded from memory, handled by
 * iemCImpl_FarJmp via the shared group 5 far body.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
}
12899
12900
12901/**
12902 * Opcode 0xff /6.
12903 * @param bRm The RM byte.
12904 */
12905FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
12906{
12907 IEMOP_MNEMONIC(push_Ev, "push Ev");
12908
12909 /* Registers are handled by a common worker. */
12910 if (IEM_IS_MODRM_REG_MODE(bRm))
12911 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
12912
12913 /* Memory we do here. */
12914 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12915 switch (pVCpu->iem.s.enmEffOpSize)
12916 {
12917 case IEMMODE_16BIT:
12918 IEM_MC_BEGIN(0, 2);
12919 IEM_MC_LOCAL(uint16_t, u16Src);
12920 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12921 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12923 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12924 IEM_MC_PUSH_U16(u16Src);
12925 IEM_MC_ADVANCE_RIP_AND_FINISH();
12926 IEM_MC_END();
12927 break;
12928
12929 case IEMMODE_32BIT:
12930 IEM_MC_BEGIN(0, 2);
12931 IEM_MC_LOCAL(uint32_t, u32Src);
12932 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12935 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12936 IEM_MC_PUSH_U32(u32Src);
12937 IEM_MC_ADVANCE_RIP_AND_FINISH();
12938 IEM_MC_END();
12939 break;
12940
12941 case IEMMODE_64BIT:
12942 IEM_MC_BEGIN(0, 2);
12943 IEM_MC_LOCAL(uint64_t, u64Src);
12944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12945 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12947 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12948 IEM_MC_PUSH_U64(u64Src);
12949 IEM_MC_ADVANCE_RIP_AND_FINISH();
12950 IEM_MC_END();
12951 break;
12952
12953 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12954 }
12955}
12956
12957
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher: selects the handler from the ModR/M reg field
 * (/0 inc, /1 dec, /2 calln, /3 callf, /4 jmpn, /5 jmpf, /6 push;
 * /7 is undefined and raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
        case 1:
            return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    AssertFailedReturn(VERR_IEM_IPE_3); /* reg field is 3 bits; cases 0..7 are exhaustive */
}
12986
12987
12988
/**
 * The one-byte opcode decoder map, indexed by the opcode byte (0x00..0xff).
 *
 * Each entry points to the FNIEMOP decoder/emulator for that opcode;
 * prefix bytes (segment overrides, 0x66/0x67, lock/rep, 0x0f escape) and
 * group opcodes dispatch further from their entries.  Declared extern at
 * the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
13056
13057
13058/** @} */
13059
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette