VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 100337

Last change on this file since 100337 was 100231, checked in by vboxsync, 19 months ago

VMM/IEM: Recompiler fixes. Gets thru the bios now. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 447.3 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 100231 2023-06-20 23:10:27Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte and emits the register-destination path and the
 * non-locked memory path.  The LOCK-prefix else-branch is deliberately left
 * open, so an invocation of this macro MUST be followed by exactly one of
 * IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED to
 * close the braces.
 *
 * @param   a_fnNormalU8    The non-locked worker, invoked via
 *                          IEM_MC_CALL_VOID_AIMPL_3 as (pu8Dst, u8Src, pEFlags).
 * @param   a_fRW           IEM_ACCESS_XXX flags used when mapping/committing
 *                          the memory destination (read-write for the
 *                          modifying instructions, narrower for TEST/CMP).
 */
#define IEMOP_BODY_BINARY_rm_r8(a_fnNormalU8, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Closes the open else-branch of IEMOP_BODY_BINARY_rm_r8 for instructions
 * that do not permit a LOCK prefix: a present LOCK prefix is rejected via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX_RET.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
125
/**
 * Closes the open else-branch of IEMOP_BODY_BINARY_rm_r8 by emitting the
 * LOCK-prefixed memory path (always mapped IEM_ACCESS_DATA_RW).
 *
 * @param   a_fnLockedU8    The locked/atomic worker, invoked via
 *                          IEM_MC_CALL_VOID_AIMPL_3 as (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
147
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * Self-contained (no open braces): the destination is always a register, so
 * the LOCK prefix is rejected up front via
 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX on both paths.
 *
 * @param   a_fnNormalU8    Worker invoked via IEM_MC_CALL_VOID_AIMPL_3 as
 *                          (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
196
197
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Switches on the effective operand size for both the register and the
 * non-locked memory destination paths.  Like IEMOP_BODY_BINARY_rm_r8, the
 * LOCK-prefix else-branch is left open and MUST be closed with either
 * IEMOP_BODY_BINARY_rm_rv_NO_LOCK or IEMOP_BODY_BINARY_rm_rv_LOCKED.
 *
 * @param   a_fnNormalU16   16-bit worker, called as (pu16Dst, u16Src, pEFlags).
 * @param   a_fnNormalU32   32-bit worker, called as (pu32Dst, u32Src, pEFlags).
 * @param   a_fnNormalU64   64-bit worker, called as (pu64Dst, u64Src, pEFlags).
 * @param   a_fRW           IEM_ACCESS_XXX flags for the memory destination.
 *                          Also used in the 32-bit register path to decide
 *                          whether the high half of the 64-bit register must
 *                          be cleared (skipped for TEST/CMP, which don't
 *                          write the destination).
 */
#define IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                if ((a_fRW) == IEM_ACCESS_DATA_RW) /* not TEST and CMP */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
340
/**
 * Closes the open else-branch of IEMOP_BODY_BINARY_rm_rv for instructions
 * that do not permit a LOCK prefix: a present LOCK prefix is rejected via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX_RET.
 */
#define IEMOP_BODY_BINARY_rm_rv_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
347
/**
 * Closes the open else-branch of IEMOP_BODY_BINARY_rm_rv by emitting the
 * LOCK-prefixed memory paths for all three operand sizes (always mapped
 * IEM_ACCESS_DATA_RW).
 *
 * @param   a_fnLockedU16   Locked 16-bit worker, (pu16Dst, u16Src, pEFlags).
 * @param   a_fnLockedU32   Locked 32-bit worker, (pu32Dst, u32Src, pEFlags).
 * @param   a_fnLockedU64   Locked 64-bit worker, (pu64Dst, u64Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW /* CMP,TEST */); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
416
417
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   a_fnNormalU8    Worker invoked via IEM_MC_CALL_VOID_AIMPL_3 as
 *                          (pu8Dst, u8Src, pEFlags), with pu8Dst referencing
 *                          AL and u8Src the decoded immediate.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
437
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * In 64-bit mode the immediate is a dword sign-extended to 64 bits
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64), matching the instruction encoding.
 *
 * @param   a_fnNormalU16       16-bit worker, (pu16Dst, u16Src, pEFlags).
 * @param   a_fnNormalU32       32-bit worker, (pu32Dst, u32Src, pEFlags).
 * @param   a_fnNormalU64       64-bit worker, (pu64Dst, u64Src, pEFlags).
 * @param   a_fModifiesDstReg   Non-zero when the instruction writes its
 *                              destination, so the 32-bit path must clear
 *                              the high half of RAX (zero for TEST/CMP).
 *
 * NOTE(review): the cases have no break; the MC blocks appear to exit via
 * IEM_MC_ADVANCE_RIP_AND_FINISH / IEM_MC_END -- confirm against the MC
 * macro definitions before relying on this.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
504
505
506
/* Instruction specification format - work in progress:  */

/**
 * @opcode      0x00
 * @opmnemonic  add
 * @op1         rm:Eb
 * @op2         reg:Gb
 * @opmaps      one
 * @openc       ModR/M
 * @opflmodify  cf,pf,af,zf,sf,of
 * @ophints     harmless ignores_op_sizes
 * @opstats     add_Eb_Gb
 * @opgroup     og_gen_arith_bin
 * @optest              op1=1   op2=1   -> op1=2   efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf op1=1   op2=2   -> op1=3   efl&|=nc,po,na,nz,pl,nv
 * @optest              op1=254 op2=1   -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest              op1=128 op2=128 -> op1=0   efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8: shared byte r/m body; the LOCKED macro closes the open LOCK branch. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
531
532
/**
 * @opcode      0x01
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64: shared word-size r/m body plus its LOCKED closer. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
548
549
/**
 * @opcode      0x02
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8: register destination, so no LOCK form. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
561
562
/**
 * @opcode      0x03
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64: register destination; final arg flags it as destination-modifying. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
574
575
/**
 * @opcode      0x04
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
587
588
/**
 * @opcode      0x05
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nv,pl,nz,na,pe
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, imm16/32 (imm32 sign-extended in 64-bit mode); modifies the destination. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
603
604
/**
 * @opcode      0x06
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode, hence IEMOP_HLP_NO_64BIT. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
615
616
/**
 * @opcode      0x07
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode; deferred to the C implementation since it may change the execution mode. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
628
629
/**
 * @opcode      0x08
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest                  op1=7    op2=12   -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0    op2=0    -> op1=0  efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest                  op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8: AF is architecturally undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
648
649
/**
 * @opcode      0x09
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf  op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0  op2=0 -> op1=0  efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a             op2=0xa5a5             -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a         op2=0xa5a5a5a5         -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64.  (Fixed: the doc block above opened with plain C-comment
       syntax, hiding the @opcode/@optest tags from the doxygen/test tooling.) */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
670
671
/**
 * @opcode      0x0a
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8: register destination. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
686
687
/**
 * @opcode      0x0b
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64: register destination. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
702
703
/**
 * @opcode      0x0c
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
718
719
/**
 * @opcode      0x0d
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf  op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0  op2=0 -> op1=0  efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a             op2=0xa5a5     -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a         op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, imm16/32 (imm32 sign-extended in 64-bit mode). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
740
741
/**
 * @opcode      0x0e
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
752
753
/**
 * @opcode      0x0f
 * @opmnemonic  EscTwo0f
 * @openc       two0f
 * @opdisenum   OP_2B_ESC
 * @ophints     harmless
 * @opgroup     og_escapes
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* Sanity check the table the first time around.  Spot-checks the 0x0f 0xbc
       entries, where only the F3-prefixed column differs (BSF vs TZCNT). */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */  }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* Dispatch: four table entries per opcode byte, selected by the active prefix. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_END_TB/*?*/,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
796
/**
 * @opcode      0x10
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest      op1=1    op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=1    op2=1 efl|=cf  -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest      op1=0xff op2=0 efl|=cf  -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest      op1=0    op2=0 efl|=cf  -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=0    op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8: add with carry-in (CF tested per the tags above). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
814
815
/**
 * @opcode      0x11
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest      op1=1  op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=1  op2=1 efl|=cf  -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest      op1=-1 op2=0 efl|=cf  -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest      op1=0  op2=0 efl|=cf  -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=0  op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
833
834
/**
 * @opcode      0x12
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8: register destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
847
848
/**
 * @opcode      0x13
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64: register destination. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
861
862
/**
 * @opcode      0x14
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
875
876
/**
 * @opcode      0x15
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, imm16/32 (imm32 sign-extended in 64-bit mode). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
889
890
/**
 * @opcode      0x16
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
900
901
/**
 * @opcode      0x17
 * @opgroup     og_stack_sreg
 *
 * NOTE(review): this block previously carried @opgroup og_gen_arith_bin,
 * @opfltest cf and @opflmodify cf,pf,af,zf,sf,of -- apparently copy-pasted
 * from the neighbouring SBB handlers.  POP SS is a stack/segment-register
 * operation and does not test or modify the arithmetic flags.
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode; DISOPTYPE_INHIBIT_IRQS marks the
       interrupt-inhibiting behaviour disassembler-side. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
915
916
/**
 * @opcode      0x18
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8: subtract with borrow-in (CF). */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
929
930
/**
 * @opcode      0x19
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
943
944
/**
 * @opcode      0x1a
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8: register destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
956
957
/**
 * @opcode      0x1b
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64: register destination. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
969
970
/**
 * @opcode      0x1c
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
982
983
/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX destination with operand-size dependent immediate (Iz);
       last argument (1) indicates rAX is modified. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
995
996
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    /* Invalid encoding in 64-bit mode; otherwise defers to the common
       segment-register push worker. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1007
1008
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Loading a segment register can change execution mode, hence the
       IEM_CIMPL_F_MODE flag on the deferred C implementation. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1020
1021
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after AND; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1036
1037
/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after AND; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1052
1053
/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination form; no LOCK variant. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1067
1068
/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register destination form; last argument (1) = destination modified. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1082
1083
1084/**
1085 * @opcode 0x24
1086 * @opgroup og_gen_arith_bin
1087 * @opflmodify cf,pf,af,zf,sf,of
1088 * @opflundef af
1089 * @opflclear of,cf
1090 */
1091FNIEMOP_DEF(iemOp_and_Al_Ib)
1092{
1093 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1094 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1095 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1096}
1097
1098
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* AF is architecturally undefined after AND; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1112
1113
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Segment override prefix: record it, then decode and dispatch the
       following opcode byte thru the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1132
1133
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    /* Invalid in 64-bit mode; complex BCD adjust is deferred to C code. */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1148
1149
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* r/m8 read-modify-write destination plus the LOCK-prefixed path. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1161
1162
/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Word/dword/qword workers selected by effective operand size. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1174
1175
/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination form; no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1186
1187
/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register destination form; last argument (1) = destination modified. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1198
1199
/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL destination with an 8-bit immediate. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1210
1211
/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX destination with operand-size dependent immediate (Iz). */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1222
1223
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* Segment override prefix: record it, then decode and dispatch the
       following opcode byte thru the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1242
1243
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    /* Invalid in 64-bit mode; complex BCD adjust is deferred to C code. */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1258
1259
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after XOR; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1274
1275
/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after XOR; tell the verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1290
1291
/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination form; no LOCK variant. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1305
1306
/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register destination form; last argument (1) = destination modified. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1320
1321
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL destination with an 8-bit immediate. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1335
1336
1337/**
1338 * @opcode 0x35
1339 * @opgroup og_gen_arith_bin
1340 * @opflmodify cf,pf,af,zf,sf,of
1341 * @opflundef af
1342 * @opflclear of,cf
1343 */
1344FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1345{
1346 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1347 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1348 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1349}
1350
1351
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* Segment override prefix: record it, then decode and dispatch the
       following opcode byte thru the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1370
1371
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): the doc block above lists pf,zf,sf,of as undefined, but
       only OF is declared here — confirm whether that is intentional. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1419
1420
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP only reads the r/m operand (IEM_ACCESS_DATA_R), so the LOCK
       prefix is explicitly rejected. */
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1430
1431
/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* CMP only reads the r/m operand (IEM_ACCESS_DATA_R); LOCK is rejected. */
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
1441
1442
/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    /* Register/memory are only read; flags are the sole output. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1451
1452
/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* Last argument (0): destination register is not modified (contrast
       with the arithmetic forms that pass 1). */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1461
1462
/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* Fixed AL operand compared against an 8-bit immediate. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1471
1472
/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Last argument (0): rAX is not modified, only flags are updated. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1481
1482
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* Segment override prefix: record it, then decode and dispatch the
       following opcode byte thru the one-byte map. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1495
1496
1497/**
1498 * @opcode 0x3f
1499 * @opfltest af,cf
1500 * @opflmodify cf,pf,af,zf,sf,of
1501 * @opflundef pf,zf,sf,of
1502 * @opgroup og_gen_arith_dec
1503 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1504 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1505 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1506 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1507 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1508 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1509 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1510 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1511 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1512 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1513 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1514 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1515 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1516 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1517 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1518 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1519 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1520 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1521 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1522 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1523 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1524 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1525 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1526 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1527 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1528 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1529 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1530 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1531 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1532 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1533 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1534 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1535 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1536 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1537 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1538 */
1539FNIEMOP_DEF(iemOp_aas)
1540{
1541 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1542 IEMOP_HLP_NO_64BIT();
1543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1544 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1545
1546 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1547}
1548
1549
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes.
 *
 * There is deliberately no IEMMODE_64BIT case: in 64-bit mode opcodes
 * 0x40-0x4f decode as REX prefixes before this body is reached, so only
 * the 16-bit and 32-bit effective operand sizes are possible.  The 32-bit
 * case clears the high half of the 64-bit register
 * (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF), matching 32-bit write semantics.
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1584
/**
 * @opcode 0x40
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Plain REX (no R/X/B/W bits): record it and redecode the next byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1605
1606
/**
 * @opcode 0x41
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B: bit 3 extension for the r/m / base register field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1628
1629
/**
 * @opcode 0x42
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.X: bit 3 extension for the SIB index register field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1651
1652
1653
/**
 * @opcode 0x43
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B + REX.X: extend both the base/rm and the SIB index fields. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1676
1677
/**
 * @opcode 0x44
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R: bit 3 extension for the ModR/M reg field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1699
1700
/**
 * @opcode 0x45
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R + REX.B. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1723
1724
/**
 * @opcode 0x46
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R + REX.X. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1747
1748
/**
 * @opcode 0x47
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R + REX.B + REX.X. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1772
1773
/**
 * @opcode 0x48
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.W changes the operand size, so recalculate it. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
1795
1796
/**
 * @opcode 0x49
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B + REX.W; the W bit changes the operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
1819
1820
/**
 * @opcode 0x4a
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.X + REX.W; the W bit changes the operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
1843
1844
/**
 * @opcode 0x4b
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B + REX.X + REX.W. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
1868
1869
/**
 * @opcode 0x4c
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R + REX.W. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
1892
1893
/**
 * @opcode 0x4d
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R + REX.B + REX.W. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
1917
1918
/**
 * @opcode 0x4e
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R + REX.X + REX.W. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
1942
1943
/**
 * @opcode 0x4f
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R + REX.B + REX.X + REX.W - all extension bits set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
1968
1969
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the REX.B extension is folded into @a iReg, the default
 * operand size becomes 64-bit, and a 66h prefix selects 16-bit instead
 * (there is no 32-bit push in 64-bit mode, hence no IEMMODE_32BIT reach
 * there).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2015
2016
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Defers to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2025
2026
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Defers to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2035
2036
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Defers to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2045
2046
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Defers to the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2055
2056
/**
 * @opcode 0x54
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* 8086 quirk: it pushes the value of SP *after* the decrement (SP - 2),
       unlike 80286+ which push the original value; emulate that when the
       target CPU is an 8086. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2075
2076
/**
 * @opcode 0x55
 * Push rBP/eBP/BP (REX.B selects r13 in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2085
2086
/**
 * @opcode 0x56
 * Push rSI/eSI/SI (REX.B selects r14 in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2095
2096
/**
 * @opcode 0x57
 * Push rDI/eDI/DI (REX.B selects r15 in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2105
2106
/**
 * Common 'pop register' helper.
 *
 * Pops the top of the stack into the given general register.  Not used for
 * POP rSP in 64-bit mode without REX.B; see iemOp_pop_eSP for that special
 * case.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B extends the register index; POP defaults to 64-bit operand
           size in long mode, 0x66 selecting 16-bit (no 32-bit POP). */
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop directly into a reference to the destination register. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            /* 32-bit writes through a register reference do not implicitly
               zero the high half, so do it explicitly here. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2153
2154
/**
 * @opcode 0x58
 * Pop into rAX/eAX/AX (REX.B selects r8 in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2163
2164
/**
 * @opcode 0x59
 * Pop into rCX/eCX/CX (REX.B selects r9 in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2173
2174
/**
 * @opcode 0x5a
 * Pop into rDX/eDX/DX (REX.B selects r10 in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2183
2184
/**
 * @opcode 0x5b
 * Pop into rBX/eBX/BX (REX.B selects r11 in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2193
2194
/**
 * @opcode 0x5c
 *
 * POP rSP needs special treatment: the popped value must end up in the stack
 * pointer itself, so it is read into a local first and stored afterwards
 * rather than popping through a register reference as the common helper does.
 * With REX.B the destination is r12, not rSP, and the common helper handles
 * it normally.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B set: target is r12 - no special casing needed. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2244
2245
/**
 * @opcode 0x5d
 * Pop into rBP/eBP/BP (REX.B selects r13 in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2254
2255
/**
 * @opcode 0x5e
 * Pop into rSI/eSI/SI (REX.B selects r14 in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2264
2265
/**
 * @opcode 0x5f
 * Pop into rDI/eDI/DI (REX.B selects r15 in 64-bit mode).
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2274
2275
/**
 * @opcode 0x60
 * PUSHA/PUSHAD - push all general registers.  186+ only, invalid in 64-bit
 * mode.  Deferred to a C implementation for either operand size.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
}
2289
2290
/**
 * @opcode 0x61
 * POPA/POPAD outside 64-bit mode; in 64-bit mode the byte is the (Xeon Phi)
 * MVEX prefix, which is not supported and raises \#UD.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS, iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS, iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2310
2311
2312/**
2313 * @opcode 0x62
2314 * @opmnemonic bound
2315 * @op1 Gv_RO
2316 * @op2 Ma
2317 * @opmincpu 80186
2318 * @ophints harmless x86_invalid_64
2319 * @optest op1=0 op2=0 ->
2320 * @optest op1=1 op2=0 -> value.xcpt=5
2321 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2322 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2323 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2324 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2325 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2326 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2327 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2328 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2329 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2330 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2331 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2332 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2333 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2334 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2335 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2336 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2337 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2338 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2339 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2340 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2341 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2342 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2343 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2344 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2345 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2346 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2347 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2348 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2349 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2350 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2351 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2352 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2353 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2354 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2355 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2356 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2357 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2358 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2359 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2360 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2361 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2362 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2363 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* Memory operand: genuine BOUND.  Fetch the index register and
               the two bounds, then defer the range check (and possible \#BR)
               to the C implementation. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD=3 in 32-bit mode: this is the EVEX prefix, provided the guest
           CPU actually has AVX-512. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /* 64-bit mode: always EVEX (BOUND does not exist here). */
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* Consume the remaining two EVEX payload bytes; decoding of the prefixed
       instruction itself is not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2451
2452
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjust RPL of the selector in Ew to be at least that of Gw,
 * setting ZF accordingly.  286+ protected mode only (raises \#UD in real and
 * V86 mode); in 64-bit mode this opcode is MOVSXD instead. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory: the destination is mapped read-write and committed after
           the assembly helper has updated it and the flags. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2500
2501
/**
 * @opcode 0x63
 *
 * MOVSXD Gv,Ev - sign-extend a 32-bit source into a 64-bit register
 * (64-bit mode only; outside 64-bit mode this opcode is ARPL).
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* Without REX.W the instruction behaves like a plain 32-bit move;
           that path is not implemented here yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2549
2550
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * FS segment-override prefix: records the prefix, sets the effective segment
 * and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2568
2569
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * GS segment-override prefix: records the prefix, sets the effective segment
 * and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2587
2588
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Operand-size override prefix: flips the effective operand size and
 * recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2613
2614
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Address-size override prefix: flips the effective addressing mode
 * (16 <-> 32, or 64 -> 32) and recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2640
2641
/**
 * @opcode 0x68
 * PUSH Iz - push a word/dword immediate (sign-extended dword in 64-bit
 * mode).  186+ only.
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit PUSH takes a 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2688
2689
/**
 * @opcode 0x69
 * IMUL Gv,Ev,Iz - three-operand signed multiply with a full-size immediate.
 * 186+ only.  SF/ZF/AF/PF are undefined on real hardware; the selected
 * assembly helper matches the target CPU's EFLAGS behaviour.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local and store back so Ev is only read. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The immediate follows the modr/m bytes (hence cbImm = 2). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand; 32-bit immediate sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2855
2856
/**
 * @opcode 0x6a
 * PUSH Ib - push a sign-extended byte immediate.  186+ only.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    /* The byte is read as signed so the implicit conversion in the
       IEM_MC_PUSH_* below performs the sign extension. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2891
2892
/**
 * @opcode 0x6b
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate.  186+ only.  SF/ZF/AF/PF are undefined on real hardware; the
 * selected assembly helper matches the target CPU's EFLAGS behaviour.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand; byte immediate sign-extended to 16 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into a local and store back so Ev is only read. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* One immediate byte follows the modr/m bytes (cbImm = 1). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand; byte immediate sign-extended to 32 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand; byte immediate sign-extended to 64 bits. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3058
3059
/**
 * @opcode 0x6c
 * INSB / REP INSB - input byte(s) from port DX to ES:[rDI].  186+ only.
 * Deferred to C implementations selected by REP prefix and effective
 * address size; all variants may cause a VM exit (I/O intercepts).
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REPNZ behaves like REP for INS. */
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3090
3091
/**
 * @opcode 0x6d
 * ins Yv,DX - input word/dword string from the port in DX.
 *
 * Deferred to a C implementation selected by effective operand size and
 * address size.  A 64-bit operand size is handled like 32-bit (there is no
 * 64-bit port I/O); both F3 and F2 prefixes act as REP.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - 64-bit operand size uses the 32-bit implementation */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* the macro supplies the default: label */
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - 64-bit operand size uses the 32-bit implementation */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3154
3155
/**
 * @opcode 0x6e
 * outs DX,Yb - output byte string to the port in DX.
 *
 * Unlike INS, the source segment honours segment-override prefixes, so the
 * effective segment (iEffSeg) is passed to the C implementation.  Both F3
 * and F2 prefixes act as REP.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3186
3187
/**
 * @opcode 0x6f
 * outs DX,Yv - output word/dword string to the port in DX.
 *
 * Deferred to a C implementation selected by effective operand and address
 * size; a 64-bit operand size is handled like 32-bit (no 64-bit port I/O).
 * The source segment honours segment-override prefixes (iEffSeg).
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - 64-bit operand size uses the 32-bit implementation */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - 64-bit operand size uses the 32-bit implementation */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3250
3251
/**
 * @opcode 0x70
 * jo Jb - jump short if overflow (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3270
3271
/**
 * @opcode 0x71
 * jno Jb - jump short if not overflow (OF=0); the test is on OF being set,
 * so the jump sits in the else branch.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3290
/**
 * @opcode 0x72
 * jc/jb/jnae Jb - jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3309
3310
/**
 * @opcode 0x73
 * jnc/jnb/jae Jb - jump short if no carry (CF=0); jump is in the else branch.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3329
3330
/**
 * @opcode 0x74
 * je/jz Jb - jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3349
3350
/**
 * @opcode 0x75
 * jne/jnz Jb - jump short if not equal/not zero (ZF=0); jump is in the else
 * branch.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3369
3370
/**
 * @opcode 0x76
 * jbe/jna Jb - jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3389
3390
/**
 * @opcode 0x77
 * ja/jnbe Jb - jump short if above (CF=0 and ZF=0); jump is in the else
 * branch of the CF|ZF test.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3409
3410
/**
 * @opcode 0x78
 * js Jb - jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3429
3430
/**
 * @opcode 0x79
 * jns Jb - jump short if not sign (SF=0); jump is in the else branch.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3449
3450
/**
 * @opcode 0x7a
 * jp/jpe Jb - jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3469
3470
/**
 * @opcode 0x7b
 * jnp/jpo Jb - jump short if parity odd (PF=0); jump is in the else branch.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3489
3490
/**
 * @opcode 0x7c
 * jl/jnge Jb - jump short if less (signed: SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3509
3510
/**
 * @opcode 0x7d
 * jnl/jge Jb - jump short if greater or equal (signed: SF == OF); the jump
 * is in the else branch of the SF != OF test.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3529
3530
/**
 * @opcode 0x7e
 * jle/jng Jb - jump short if less or equal (signed: ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3549
3550
/**
 * @opcode 0x7f
 * jg/jnle Jb - jump short if greater (signed: ZF=0 and SF == OF); the jump
 * is in the else branch of the ZF-or-(SF != OF) test.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit relative displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3569
3570
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register-target path and the non-locked memory-target path.
 * Note: this macro deliberately ends inside an open 'else { ... }' covering
 * the LOCK-prefixed memory case; it must be completed by exactly one of
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() or IEMOP_BODY_BINARY_Eb_Ib_LOCKED(),
 * which close the dangling braces.  Expects 'bRm' and 'pVCpu' in scope.
 */
#define IEMOP_BODY_BINARY_Eb_Ib(a_fnNormalU8, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0

/* Tail for IEMOP_BODY_BINARY_Eb_Ib when the instruction has no locked
   variant (cmp): a LOCK prefix raises the invalid-lock-prefix exception. */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0

/* Tail for IEMOP_BODY_BINARY_Eb_Ib supplying the locked (atomic) memory
   variant used when a LOCK prefix is present. */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3650
3651
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * add Eb,Ib - byte add with immediate; supports a locked memory variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
3662
3663
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * or Eb,Ib - byte bitwise OR with immediate; supports a locked memory variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
3674
3675
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * adc Eb,Ib - byte add-with-carry; supports a locked memory variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
3686
3687
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * sbb Eb,Ib - byte subtract-with-borrow; supports a locked memory variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
3698
3699
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * and Eb,Ib - byte bitwise AND with immediate; supports a locked memory
 * variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
3710
3711
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * sub Eb,Ib - byte subtract with immediate; supports a locked memory variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
3722
3723
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * xor Eb,Ib - byte bitwise XOR with immediate; supports a locked memory
 * variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
3734
3735
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * cmp Eb,Ib - byte compare with immediate.  Read-only destination
 * (IEM_ACCESS_DATA_R) and no locked variant: LOCK raises \#UD.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
3746
3747
/**
 * @opcode 0x80
 * Group 1, Eb,Ib forms: dispatch on the ModRM reg field
 * (/0=add /1=or /2=adc /3=sbb /4=and /5=sub /6=xor /7=cmp).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3767
3768
/**
 * Body for a group 1 binary operator, Ev,Iz forms (opcode 0x81).
 *
 * Emits the register-target path and the non-locked memory-target path for
 * all three effective operand sizes.  Like IEMOP_BODY_BINARY_Eb_Ib this
 * macro ends inside an open 'else { ... }' covering the LOCK-prefixed
 * memory case and must be completed by IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK()
 * or IEMOP_BODY_BINARY_Ev_Iz_LOCKED().  Expects 'bRm' and 'pVCpu' in scope.
 */
#define IEMOP_BODY_BINARY_Ev_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                if (a_fRW == IEM_ACCESS_DATA_RW) /* only writers (not cmp) zero bits 63:32 of the GPR */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                /* Iz is at most 32 bits wide, so sign-extend it to 64 bits. */ \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* NOTE(review): the 16/32-bit cases use IEMOP_HLP_DONE_DECODING() here - confirm the asymmetry is intentional */ \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0

/* Tail for IEMOP_BODY_BINARY_Ev_Iz when the instruction has no locked
   variant (cmp): a LOCK prefix raises the invalid-lock-prefix exception. */
#define IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0

/* Tail for IEMOP_BODY_BINARY_Ev_Iz supplying the locked (atomic) memory
   variants for all three operand sizes. */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4004
4005
/**
 * @opmaps grp1_81
 * @opcode /0
 * add Ev,Iz - word/dword/qword add with immediate; supports a locked
 * memory variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4016
4017
/**
 * @opmaps grp1_81
 * @opcode /1
 * or Ev,Iz - word/dword/qword bitwise OR with immediate; supports a locked
 * memory variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4028
4029
/**
 * @opmaps grp1_81
 * @opcode /2
 * adc Ev,Iz - word/dword/qword add-with-carry; supports a locked memory
 * variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4040
4041
/**
 * @opmaps grp1_81
 * @opcode /3
 * sbb Ev,Iz - word/dword/qword subtract-with-borrow; supports a locked
 * memory variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4052
4053
/**
 * @opmaps grp1_81
 * @opcode /4
 * and Ev,Iz - word/dword/qword bitwise AND with immediate; supports a
 * locked memory variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4064
4065
/**
 * @opmaps grp1_81
 * @opcode /5
 * sub Ev,Iz - word/dword/qword subtract with immediate; supports a locked
 * memory variant.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4076
4077
/**
 * 'xor Ev,Iz' -- XORs an immediate into a register or memory operand.
 * Memory form is read-modify-write; LOCK prefix uses the locked helpers via
 * the _LOCKED body macro.
 *
 * @opmaps grp1_81
 * @opcode /6
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4088
4089
/**
 * 'cmp Ev,Iz' -- compares an immediate with a register or memory operand.
 * Unlike the other group-1 members the operand is only read
 * (IEM_ACCESS_DATA_R) and a LOCK prefix is rejected via the _NO_LOCK body.
 *
 * @opmaps grp1_81
 * @opcode /7
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK();
}
4100
4101
/**
 * Group 1 dispatcher for opcode 0x81: selects the add/or/adc/sbb/and/sub/
 * xor/cmp 'Ev,Iz' worker based on the ModR/M reg field (/0../7).
 *
 * @opcode 0x81
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg field is 3 bits, 0..7 covered */
    }
}
4121
4122
/**
 * Opcode 0x82: alias of the group-1 'Eb,Ib' opcode 0x80, only valid outside
 * 64-bit mode (IEMOP_HLP_NO_64BIT rejects it there); simply forwards to the
 * 0x80 decoder.
 *
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4133
4134
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.
 *
 * The byte immediate is sign-extended (the (int8_t)u8Imm casts below) to the
 * effective operand size (16/32/64-bit).  Covers the register target and the
 * non-LOCKed memory target; note that the macro deliberately ends inside an
 * open 'else {' so the companion IEMOP_BODY_BINARY_Ev_Ib_LOCKED or
 * IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK macro must follow to emit the
 * LOCK-prefixed path and close the braces.
 *
 * @param a_fnNormalU16 16-bit worker (assembly helper) for the operation.
 * @param a_fnNormalU32 32-bit worker.
 * @param a_fnNormalU64 64-bit worker.
 * @param a_fRW         IEM_ACCESS_DATA_RW for read-modify-write ops,
 *                      IEM_ACCESS_DATA_R for cmp (also suppresses the
 *                      32-bit high-dword clearing of the destination).
 */
#define IEMOP_BODY_BINARY_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                if ((a_fRW) != IEM_ACCESS_DATA_R) \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
4286
/**
 * Companion to IEMOP_BODY_BINARY_Ev_Ib for instructions that do not permit
 * the LOCK prefix (cmp): finishes decoding and rejects the LOCK-prefixed
 * memory form via IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(), then closes the
 * braces left open by IEMOP_BODY_BINARY_Ev_Ib.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4293
/**
 * Companion to IEMOP_BODY_BINARY_Ev_Ib: emits the LOCK-prefixed memory path
 * using the locked assembly workers (always IEM_ACCESS_DATA_RW mapping), and
 * closes the braces left open by IEMOP_BODY_BINARY_Ev_Ib.  The byte
 * immediate is sign-extended to the effective operand size, same as in the
 * unlocked body.
 *
 * @param a_fnLockedU16 16-bit locked worker.
 * @param a_fnLockedU32 32-bit locked worker.
 * @param a_fnLockedU64 64-bit locked worker.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4371
/**
 * 'add Ev,Ib' -- adds a sign-extended byte immediate to a register or memory
 * operand.  LOCK-prefixed memory forms use the locked helpers.
 *
 * @opmaps grp1_83
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4382
4383
/**
 * 'or Ev,Ib' -- ORs a sign-extended byte immediate into a register or memory
 * operand.  LOCK-prefixed memory forms use the locked helpers.
 *
 * @opmaps grp1_83
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4394
4395
/**
 * 'adc Ev,Ib' -- add-with-carry of a sign-extended byte immediate into a
 * register or memory operand.  LOCK-prefixed memory forms use the locked
 * helpers.
 *
 * @opmaps grp1_83
 * @opcode /2
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4406
4407
/**
 * 'sbb Ev,Ib' -- subtract-with-borrow of a sign-extended byte immediate from
 * a register or memory operand.  LOCK-prefixed memory forms use the locked
 * helpers.
 *
 * @opmaps grp1_83
 * @opcode /3
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4418
4419
/**
 * 'and Ev,Ib' -- ANDs a sign-extended byte immediate into a register or
 * memory operand.  LOCK-prefixed memory forms use the locked helpers.
 *
 * @opmaps grp1_83
 * @opcode /4
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4430
4431
/**
 * 'sub Ev,Ib' -- subtracts a sign-extended byte immediate from a register or
 * memory operand.  LOCK-prefixed memory forms use the locked helpers.
 *
 * @opmaps grp1_83
 * @opcode /5
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4442
4443
/**
 * 'xor Ev,Ib' -- XORs a sign-extended byte immediate into a register or
 * memory operand.  LOCK-prefixed memory forms use the locked helpers.
 *
 * @opmaps grp1_83
 * @opcode /6
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4454
4455
/**
 * 'cmp Ev,Ib' -- compares a sign-extended byte immediate with a register or
 * memory operand.  Read-only access (IEM_ACCESS_DATA_R); the LOCK prefix is
 * rejected via the _NO_LOCK body.
 *
 * @opmaps grp1_83
 * @opcode /7
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK();
}
4466
4467
/**
 * Group 1 dispatcher for opcode 0x83 ('Ev,Ib' with sign-extended byte
 * immediate): selects the worker based on the ModR/M reg field (/0../7).
 *
 * @opcode 0x83
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg field is 3 bits, 0..7 covered */
    }
}
4490
4491
/**
 * 'test Eb,Gb' -- byte TEST; only reads both operands (IEM_ACCESS_DATA_R)
 * and updates EFLAGS (AF is declared undefined for verification purposes).
 * The LOCK prefix is rejected via the _NO_LOCK body.
 *
 * @opcode 0x84
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_test_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
4502
4503
/**
 * 'test Ev,Gv' -- word/dword/qword TEST; only reads both operands
 * (IEM_ACCESS_DATA_R) and updates EFLAGS (AF declared undefined for
 * verification purposes).  The LOCK prefix is rejected via the _NO_LOCK body.
 *
 * @opcode 0x85
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
4514
4515
/**
 * 'xchg Eb,Gb' -- exchanges a byte register with a byte register or memory
 * operand.  The memory form uses the locked exchange helper unless the
 * IEM_F_X86_DISREGARD_LOCK execution flag is set.
 *
 * @opcode 0x86
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Plain register<->register swap via two fetches and two stores. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* The memory form is performed locked unless locking is being
           disregarded for this VCpu. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4567
4568
/**
 * 'xchg Ev,Gv' -- exchanges a word/dword/qword register with a register or
 * memory operand.  The memory form uses the locked exchange helpers unless
 * the IEM_F_X86_DISREGARD_LOCK execution flag is set.  In the 32-bit memory
 * form the high dword of the register is cleared explicitly after the swap.
 *
 * @opcode 0x87
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                /* Locked unless locking is being disregarded for this VCpu. */
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* 32-bit writes zero the high dword of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4701
4702
/**
 * 'mov Eb,Gb' -- stores a byte register into a byte register or memory
 * operand.  No LOCK prefix is allowed.
 *
 * @opcode 0x88
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4742
4743
/**
 * 'mov Ev,Gv' -- stores a word/dword/qword register into a register or
 * memory operand, switching on the effective operand size.  No LOCK prefix
 * is allowed.
 *
 * @opcode 0x89
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4838
4839
/**
 * 'mov Gb,Eb' -- loads a byte register from a byte register or memory
 * operand.  No LOCK prefix is allowed.
 *
 * @opcode 0x8a
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4878
4879
/**
 * 'mov Gv,Ev' -- loads a word/dword/qword register from a register or memory
 * operand, switching on the effective operand size.  No LOCK prefix is
 * allowed.
 *
 * @opcode 0x8b
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4974
4975
/**
 * Opcode 0x63 dispatcher: decodes to ARPL outside 64-bit code; in 64-bit
 * code it is MOVSXD when the effective operand size is 64-bit, otherwise it
 * behaves like a plain 'mov Gv,Ev'.
 *
 * opcode 0x63
 * @todo Table fixme
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
4988
4989
/**
 * 'mov Ev,Sw' -- stores a segment register into a general register or
 * memory.  Raises invalid-opcode for segment register indexes above GS.
 * The register form honours the operand size (zero-extending for 32/64-bit
 * targets); the memory form always stores a word.
 *
 * @opcode 0x8c
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (   iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended */
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5066
5067
5068
5069
/**
 * 'lea Gv,M' -- loads the effective address of the memory operand into a
 * register, truncating it to the effective operand size for the 16/32-bit
 * forms.  The register form of ModR/M is invalid and raises invalid-opcode.
 *
 * @opcode 0x8d
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate to 16 bits */
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate to 32 bits */
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5119
5120
5121/**
5122 * @opcode 0x8e
5123 */
5124FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5125{
5126 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5127
5128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5129
5130 /*
5131 * The practical operand size is 16-bit.
5132 */
5133#if 0 /* not necessary */
5134 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5135#endif
5136
5137 /*
5138 * Check that the destination register exists and can be used with this
5139 * instruction. The REX.R prefix is ignored.
5140 */
5141 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5142 if ( iSegReg == X86_SREG_CS
5143 || iSegReg > X86_SREG_GS)
5144 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5145
5146 /*
5147 * If rm is denoting a register, no more instruction bytes.
5148 */
5149 if (IEM_IS_MODRM_REG_MODE(bRm))
5150 {
5151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5152 IEM_MC_BEGIN(2, 0);
5153 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5154 IEM_MC_ARG(uint16_t, u16Value, 1);
5155 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5156 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5157 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5158 else
5159 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5160 IEM_MC_END();
5161 }
5162 else
5163 {
5164 /*
5165 * We're loading the register from memory. The access is word sized
5166 * regardless of operand size prefixes.
5167 */
5168 IEM_MC_BEGIN(2, 1);
5169 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5170 IEM_MC_ARG(uint16_t, u16Value, 1);
5171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5175 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5176 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5177 else
5178 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5179 IEM_MC_END();
5180 }
5181}
5182
5183
/** Opcode 0x8f /0.
 *
 * POP Ev - pop a word/dword/qword off the stack into a general register or
 * memory.  The memory form is the tricky one: Intel specifies that rSP is
 * incremented *before* it is used in the effective address calculation, so
 * the EA calculation must be biased by the pop size (see below).
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG( RTGCPTR, GCPtrEffDst, 1);
            /* The pop size in the high byte ('2 << 8') biases rSP during the
               EA calculation, implementing Intel's pre-increment rule. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }
        /* NOTE(review): no break here or below - presumably
           IEM_MC_CALL_CIMPL_2 expands to a return; confirm. */

        case IEMMODE_32BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG( RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        case IEMMODE_64BIT:
        {
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_ARG( RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_CALL_CIMPL_2(0, iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP copy so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new RSP only after both pop and store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
5318
5319
5320/**
5321 * @opcode 0x8f
5322 */
5323FNIEMOP_DEF(iemOp_Grp1A__xop)
5324{
5325 /*
5326 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5327 * three byte VEX prefix, except that the mmmmm field cannot have the values
5328 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5329 */
5330 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5331 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5332 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5333
5334 IEMOP_MNEMONIC(xop, "xop");
5335 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5336 {
5337 /** @todo Test when exctly the XOP conformance checks kick in during
5338 * instruction decoding and fetching (using \#PF). */
5339 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5340 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5341 if ( ( pVCpu->iem.s.fPrefixes
5342 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5343 == 0)
5344 {
5345 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5346 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
5347 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5348 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5349 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5350 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
5351 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
5352 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
5353 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
5354
5355 /** @todo XOP: Just use new tables and decoders. */
5356 switch (bRm & 0x1f)
5357 {
5358 case 8: /* xop opcode map 8. */
5359 IEMOP_BITCH_ABOUT_STUB();
5360 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5361
5362 case 9: /* xop opcode map 9. */
5363 IEMOP_BITCH_ABOUT_STUB();
5364 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5365
5366 case 10: /* xop opcode map 10. */
5367 IEMOP_BITCH_ABOUT_STUB();
5368 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5369
5370 default:
5371 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5372 IEMOP_RAISE_INVALID_OPCODE_RET();
5373 }
5374 }
5375 else
5376 Log(("XOP: Invalid prefix mix!\n"));
5377 }
5378 else
5379 Log(("XOP: XOP support disabled!\n"));
5380 IEMOP_RAISE_INVALID_OPCODE_RET();
5381}
5382
5383
5384/**
5385 * Common 'xchg reg,rAX' helper.
5386 */
5387FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
5388{
5389 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5390
5391 iReg |= pVCpu->iem.s.uRexB;
5392 switch (pVCpu->iem.s.enmEffOpSize)
5393 {
5394 case IEMMODE_16BIT:
5395 IEM_MC_BEGIN(0, 2);
5396 IEM_MC_LOCAL(uint16_t, u16Tmp1);
5397 IEM_MC_LOCAL(uint16_t, u16Tmp2);
5398 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
5399 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
5400 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
5401 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
5402 IEM_MC_ADVANCE_RIP_AND_FINISH();
5403 IEM_MC_END();
5404 break;
5405
5406 case IEMMODE_32BIT:
5407 IEM_MC_BEGIN(0, 2);
5408 IEM_MC_LOCAL(uint32_t, u32Tmp1);
5409 IEM_MC_LOCAL(uint32_t, u32Tmp2);
5410 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
5411 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
5412 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
5413 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
5414 IEM_MC_ADVANCE_RIP_AND_FINISH();
5415 IEM_MC_END();
5416 break;
5417
5418 case IEMMODE_64BIT:
5419 IEM_MC_BEGIN(0, 2);
5420 IEM_MC_LOCAL(uint64_t, u64Tmp1);
5421 IEM_MC_LOCAL(uint64_t, u64Tmp2);
5422 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
5423 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
5424 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
5425 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
5426 IEM_MC_ADVANCE_RIP_AND_FINISH();
5427 IEM_MC_END();
5428 break;
5429
5430 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5431 }
5432}
5433
5434
5435/**
5436 * @opcode 0x90
5437 */
5438FNIEMOP_DEF(iemOp_nop)
5439{
5440 /* R8/R8D and RAX/EAX can be exchanged. */
5441 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
5442 {
5443 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
5444 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
5445 }
5446
5447 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
5448 {
5449 IEMOP_MNEMONIC(pause, "pause");
5450 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
5451 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
5452 if (!IEM_IS_IN_GUEST(pVCpu))
5453 { /* probable */ }
5454#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5455 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
5456 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
5457#endif
5458#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
5459 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
5460 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
5461#endif
5462 }
5463 else
5464 IEMOP_MNEMONIC(nop, "nop");
5465 IEM_MC_BEGIN(0, 0);
5466 IEM_MC_ADVANCE_RIP_AND_FINISH();
5467 IEM_MC_END();
5468}
5469
5470
5471/**
5472 * @opcode 0x91
5473 */
5474FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
5475{
5476 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
5477 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
5478}
5479
5480
5481/**
5482 * @opcode 0x92
5483 */
5484FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
5485{
5486 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
5487 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
5488}
5489
5490
5491/**
5492 * @opcode 0x93
5493 */
5494FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
5495{
5496 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
5497 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
5498}
5499
5500
5501/**
5502 * @opcode 0x94
5503 */
5504FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
5505{
5506 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
5507 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
5508}
5509
5510
5511/**
5512 * @opcode 0x95
5513 */
5514FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
5515{
5516 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
5517 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
5518}
5519
5520
5521/**
5522 * @opcode 0x96
5523 */
5524FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
5525{
5526 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
5527 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
5528}
5529
5530
5531/**
5532 * @opcode 0x97
5533 */
5534FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
5535{
5536 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
5537 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
5538}
5539
5540
5541/**
5542 * @opcode 0x98
5543 */
5544FNIEMOP_DEF(iemOp_cbw)
5545{
5546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5547 switch (pVCpu->iem.s.enmEffOpSize)
5548 {
5549 case IEMMODE_16BIT:
5550 IEMOP_MNEMONIC(cbw, "cbw");
5551 IEM_MC_BEGIN(0, 1);
5552 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
5553 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
5554 } IEM_MC_ELSE() {
5555 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
5556 } IEM_MC_ENDIF();
5557 IEM_MC_ADVANCE_RIP_AND_FINISH();
5558 IEM_MC_END();
5559 break;
5560
5561 case IEMMODE_32BIT:
5562 IEMOP_MNEMONIC(cwde, "cwde");
5563 IEM_MC_BEGIN(0, 1);
5564 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5565 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
5566 } IEM_MC_ELSE() {
5567 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
5568 } IEM_MC_ENDIF();
5569 IEM_MC_ADVANCE_RIP_AND_FINISH();
5570 IEM_MC_END();
5571 break;
5572
5573 case IEMMODE_64BIT:
5574 IEMOP_MNEMONIC(cdqe, "cdqe");
5575 IEM_MC_BEGIN(0, 1);
5576 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5577 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
5578 } IEM_MC_ELSE() {
5579 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
5580 } IEM_MC_ENDIF();
5581 IEM_MC_ADVANCE_RIP_AND_FINISH();
5582 IEM_MC_END();
5583 break;
5584
5585 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5586 }
5587}
5588
5589
5590/**
5591 * @opcode 0x99
5592 */
5593FNIEMOP_DEF(iemOp_cwd)
5594{
5595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5596 switch (pVCpu->iem.s.enmEffOpSize)
5597 {
5598 case IEMMODE_16BIT:
5599 IEMOP_MNEMONIC(cwd, "cwd");
5600 IEM_MC_BEGIN(0, 1);
5601 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5602 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
5603 } IEM_MC_ELSE() {
5604 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
5605 } IEM_MC_ENDIF();
5606 IEM_MC_ADVANCE_RIP_AND_FINISH();
5607 IEM_MC_END();
5608 break;
5609
5610 case IEMMODE_32BIT:
5611 IEMOP_MNEMONIC(cdq, "cdq");
5612 IEM_MC_BEGIN(0, 1);
5613 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5614 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
5615 } IEM_MC_ELSE() {
5616 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
5617 } IEM_MC_ENDIF();
5618 IEM_MC_ADVANCE_RIP_AND_FINISH();
5619 IEM_MC_END();
5620 break;
5621
5622 case IEMMODE_64BIT:
5623 IEMOP_MNEMONIC(cqo, "cqo");
5624 IEM_MC_BEGIN(0, 1);
5625 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
5626 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
5627 } IEM_MC_ELSE() {
5628 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
5629 } IEM_MC_ENDIF();
5630 IEM_MC_ADVANCE_RIP_AND_FINISH();
5631 IEM_MC_END();
5632 break;
5633
5634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5635 }
5636}
5637
5638
5639/**
5640 * @opcode 0x9a
5641 */
5642FNIEMOP_DEF(iemOp_call_Ap)
5643{
5644 IEMOP_MNEMONIC(call_Ap, "call Ap");
5645 IEMOP_HLP_NO_64BIT();
5646
5647 /* Decode the far pointer address and pass it on to the far call C implementation. */
5648 uint32_t off32Seg;
5649 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
5650 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
5651 else
5652 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
5653 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
5654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5655 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
5656 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
5657}
5658
5659
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for a pending device-not-available condition and pending x87
 * exceptions (raising them if necessary), otherwise does nothing but
 * advance RIP.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5672
5673
5674/**
5675 * @opcode 0x9c
5676 */
5677FNIEMOP_DEF(iemOp_pushf_Fv)
5678{
5679 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
5680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5681 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5682 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
5683}
5684
5685
5686/**
5687 * @opcode 0x9d
5688 */
5689FNIEMOP_DEF(iemOp_popf_Fv)
5690{
5691 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
5692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5693 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5694 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
5695}
5696
5697
5698/**
5699 * @opcode 0x9e
5700 */
5701FNIEMOP_DEF(iemOp_sahf)
5702{
5703 IEMOP_MNEMONIC(sahf, "sahf");
5704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5705 if ( IEM_IS_64BIT_CODE(pVCpu)
5706 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5707 IEMOP_RAISE_INVALID_OPCODE_RET();
5708 IEM_MC_BEGIN(0, 2);
5709 IEM_MC_LOCAL(uint32_t, u32Flags);
5710 IEM_MC_LOCAL(uint32_t, EFlags);
5711 IEM_MC_FETCH_EFLAGS(EFlags);
5712 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
5713 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5714 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
5715 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
5716 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
5717 IEM_MC_COMMIT_EFLAGS(EFlags);
5718 IEM_MC_ADVANCE_RIP_AND_FINISH();
5719 IEM_MC_END();
5720}
5721
5722
5723/**
5724 * @opcode 0x9f
5725 */
5726FNIEMOP_DEF(iemOp_lahf)
5727{
5728 IEMOP_MNEMONIC(lahf, "lahf");
5729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5730 if ( IEM_IS_64BIT_CODE(pVCpu)
5731 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5732 IEMOP_RAISE_INVALID_OPCODE_RET();
5733 IEM_MC_BEGIN(0, 1);
5734 IEM_MC_LOCAL(uint8_t, u8Flags);
5735 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
5736 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
5737 IEM_MC_ADVANCE_RIP_AND_FINISH();
5738 IEM_MC_END();
5739}
5740
5741
5742/**
5743 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
5744 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
5745 * prefixes. Will return on failures.
5746 * @param a_GCPtrMemOff The variable to store the offset in.
5747 */
5748#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
5749 do \
5750 { \
5751 switch (pVCpu->iem.s.enmEffAddrMode) \
5752 { \
5753 case IEMMODE_16BIT: \
5754 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
5755 break; \
5756 case IEMMODE_32BIT: \
5757 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
5758 break; \
5759 case IEMMODE_64BIT: \
5760 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
5761 break; \
5762 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5763 } \
5764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5765 } while (0)
5766
5767/**
5768 * @opcode 0xa0
5769 */
5770FNIEMOP_DEF(iemOp_mov_AL_Ob)
5771{
5772 /*
5773 * Get the offset and fend off lock prefixes.
5774 */
5775 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
5776 RTGCPTR GCPtrMemOff;
5777 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5778
5779 /*
5780 * Fetch AL.
5781 */
5782 IEM_MC_BEGIN(0,1);
5783 IEM_MC_LOCAL(uint8_t, u8Tmp);
5784 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5785 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
5786 IEM_MC_ADVANCE_RIP_AND_FINISH();
5787 IEM_MC_END();
5788}
5789
5790
5791/**
5792 * @opcode 0xa1
5793 */
5794FNIEMOP_DEF(iemOp_mov_rAX_Ov)
5795{
5796 /*
5797 * Get the offset and fend off lock prefixes.
5798 */
5799 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
5800 RTGCPTR GCPtrMemOff;
5801 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5802
5803 /*
5804 * Fetch rAX.
5805 */
5806 switch (pVCpu->iem.s.enmEffOpSize)
5807 {
5808 case IEMMODE_16BIT:
5809 IEM_MC_BEGIN(0,1);
5810 IEM_MC_LOCAL(uint16_t, u16Tmp);
5811 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5812 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
5813 IEM_MC_ADVANCE_RIP_AND_FINISH();
5814 IEM_MC_END();
5815 break;
5816
5817 case IEMMODE_32BIT:
5818 IEM_MC_BEGIN(0,1);
5819 IEM_MC_LOCAL(uint32_t, u32Tmp);
5820 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5821 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
5822 IEM_MC_ADVANCE_RIP_AND_FINISH();
5823 IEM_MC_END();
5824 break;
5825
5826 case IEMMODE_64BIT:
5827 IEM_MC_BEGIN(0,1);
5828 IEM_MC_LOCAL(uint64_t, u64Tmp);
5829 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5830 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
5831 IEM_MC_ADVANCE_RIP_AND_FINISH();
5832 IEM_MC_END();
5833 break;
5834
5835 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5836 }
5837}
5838
5839
5840/**
5841 * @opcode 0xa2
5842 */
5843FNIEMOP_DEF(iemOp_mov_Ob_AL)
5844{
5845 /*
5846 * Get the offset and fend off lock prefixes.
5847 */
5848 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
5849 RTGCPTR GCPtrMemOff;
5850 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5851
5852 /*
5853 * Store AL.
5854 */
5855 IEM_MC_BEGIN(0,1);
5856 IEM_MC_LOCAL(uint8_t, u8Tmp);
5857 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
5858 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
5859 IEM_MC_ADVANCE_RIP_AND_FINISH();
5860 IEM_MC_END();
5861}
5862
5863
5864/**
5865 * @opcode 0xa3
5866 */
5867FNIEMOP_DEF(iemOp_mov_Ov_rAX)
5868{
5869 /*
5870 * Get the offset and fend off lock prefixes.
5871 */
5872 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
5873 RTGCPTR GCPtrMemOff;
5874 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5875
5876 /*
5877 * Store rAX.
5878 */
5879 switch (pVCpu->iem.s.enmEffOpSize)
5880 {
5881 case IEMMODE_16BIT:
5882 IEM_MC_BEGIN(0,1);
5883 IEM_MC_LOCAL(uint16_t, u16Tmp);
5884 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
5885 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
5886 IEM_MC_ADVANCE_RIP_AND_FINISH();
5887 IEM_MC_END();
5888 break;
5889
5890 case IEMMODE_32BIT:
5891 IEM_MC_BEGIN(0,1);
5892 IEM_MC_LOCAL(uint32_t, u32Tmp);
5893 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
5894 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
5895 IEM_MC_ADVANCE_RIP_AND_FINISH();
5896 IEM_MC_END();
5897 break;
5898
5899 case IEMMODE_64BIT:
5900 IEM_MC_BEGIN(0,1);
5901 IEM_MC_LOCAL(uint64_t, u64Tmp);
5902 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
5903 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
5904 IEM_MC_ADVANCE_RIP_AND_FINISH();
5905 IEM_MC_END();
5906 break;
5907
5908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5909 }
5910}
5911
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Expands to one (non-REP) MOVS iteration: load ValBits from [rSI] in the
 * effective segment (overridable, default DS), store to ES:[rDI], then
 * advance or decrement rSI/rDI by the operand size according to EFLAGS.DF.
 * AddrBits selects the address-size truncation of rSI/rDI.
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
5930
5931/**
5932 * @opcode 0xa4
5933 */
5934FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
5935{
5936 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5937
5938 /*
5939 * Use the C implementation if a repeat prefix is encountered.
5940 */
5941 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5942 {
5943 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
5944 switch (pVCpu->iem.s.enmEffAddrMode)
5945 {
5946 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
5947 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
5948 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
5949 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5950 }
5951 }
5952 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
5953
5954 /*
5955 * Sharing case implementation with movs[wdq] below.
5956 */
5957 switch (pVCpu->iem.s.enmEffAddrMode)
5958 {
5959 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
5960 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
5961 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
5962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5963 }
5964}
5965
5966
5967/**
5968 * @opcode 0xa5
5969 */
5970FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5971{
5972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5973
5974 /*
5975 * Use the C implementation if a repeat prefix is encountered.
5976 */
5977 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5978 {
5979 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5980 switch (pVCpu->iem.s.enmEffOpSize)
5981 {
5982 case IEMMODE_16BIT:
5983 switch (pVCpu->iem.s.enmEffAddrMode)
5984 {
5985 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5986 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5987 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5988 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5989 }
5990 break;
5991 case IEMMODE_32BIT:
5992 switch (pVCpu->iem.s.enmEffAddrMode)
5993 {
5994 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5995 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5996 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5998 }
5999 case IEMMODE_64BIT:
6000 switch (pVCpu->iem.s.enmEffAddrMode)
6001 {
6002 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6003 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6004 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6006 }
6007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6008 }
6009 }
6010 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6011
6012 /*
6013 * Annoying double switch here.
6014 * Using ugly macro for implementing the cases, sharing it with movsb.
6015 */
6016 switch (pVCpu->iem.s.enmEffOpSize)
6017 {
6018 case IEMMODE_16BIT:
6019 switch (pVCpu->iem.s.enmEffAddrMode)
6020 {
6021 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
6022 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
6023 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
6024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6025 }
6026 break;
6027
6028 case IEMMODE_32BIT:
6029 switch (pVCpu->iem.s.enmEffAddrMode)
6030 {
6031 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
6032 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
6033 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
6034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6035 }
6036 break;
6037
6038 case IEMMODE_64BIT:
6039 switch (pVCpu->iem.s.enmEffAddrMode)
6040 {
6041 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6042 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
6043 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
6044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6045 }
6046 break;
6047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6048 }
6049}
6050
6051#undef IEM_MOVS_CASE
6052
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Expands to one (non-REP) CMPS iteration: compare the ValBits value at
 * [rSI] in the effective segment (overridable, default DS) against the one
 * at ES:[rDI] via iemAImpl_cmp (updating EFLAGS only - nothing is written
 * to memory), then advance or decrement rSI/rDI by the operand size
 * according to EFLAGS.DF.  AddrBits selects the address-size truncation.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6079
6080/**
6081 * @opcode 0xa6
6082 */
6083FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6084{
6085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6086
6087 /*
6088 * Use the C implementation if a repeat prefix is encountered.
6089 */
6090 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6091 {
6092 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6093 switch (pVCpu->iem.s.enmEffAddrMode)
6094 {
6095 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6096 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6097 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6098 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6099 }
6100 }
6101 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6102 {
6103 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6104 switch (pVCpu->iem.s.enmEffAddrMode)
6105 {
6106 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6107 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6108 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6110 }
6111 }
6112 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6113
6114 /*
6115 * Sharing case implementation with cmps[wdq] below.
6116 */
6117 switch (pVCpu->iem.s.enmEffAddrMode)
6118 {
6119 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
6120 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
6121 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
6122 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6123 }
6124}
6125
6126
/**
 * @opcode 0xa7
 *
 * CMPSW/CMPSD/CMPSQ - operand-size variants of CMPS; compares
 * iEffSeg:[xSI] with ES:[xDI] and steps both index registers by the
 * operand size (direction per EFLAGS.DF, see IEM_CMPS_CASE).  The REPE
 * and REPNE forms are deferred to C implementations selected by the
 * effective operand and address size.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, all cases above return via _RET macros */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, all cases above return via _RET macros */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6245
6246#undef IEM_CMPS_CASE
6247
/**
 * @opcode 0xa8
 *
 * TEST AL,Ib - non-destructive AND of AL and the immediate byte, only the
 * flags are updated (AF is declared undefined for the verifier).
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
6257
6258
/**
 * @opcode 0xa9
 *
 * TEST rAX,Iz - non-destructive AND of AX/EAX/RAX and the immediate, only
 * the flags are updated (AF is declared undefined for the verifier).  The
 * worker is selected by operand size via IEMOP_BODY_BINARY_rAX_Iz.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
6268
6269
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-REP STOS body for one operand-size/address-size combination:
 * stores the low ValBits of xAX at ES:[xDI] and steps xDI by ValBits/8
 * (down when EFLAGS.DF is set, up otherwise). */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6285
/**
 * @opcode 0xaa
 *
 * STOSB - stores AL at ES:[xDI] and steps xDI by 1 (direction per
 * EFLAGS.DF, see IEM_STOS_CASE).  REP forms defer to C implementations.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6320
6321
6322/**
6323 * @opcode 0xab
6324 */
6325FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6326{
6327 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6328
6329 /*
6330 * Use the C implementation if a repeat prefix is encountered.
6331 */
6332 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6333 {
6334 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6335 switch (pVCpu->iem.s.enmEffOpSize)
6336 {
6337 case IEMMODE_16BIT:
6338 switch (pVCpu->iem.s.enmEffAddrMode)
6339 {
6340 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
6341 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
6342 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
6343 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6344 }
6345 break;
6346 case IEMMODE_32BIT:
6347 switch (pVCpu->iem.s.enmEffAddrMode)
6348 {
6349 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
6350 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
6351 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
6352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6353 }
6354 case IEMMODE_64BIT:
6355 switch (pVCpu->iem.s.enmEffAddrMode)
6356 {
6357 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6358 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
6359 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
6360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6361 }
6362 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6363 }
6364 }
6365 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
6366
6367 /*
6368 * Annoying double switch here.
6369 * Using ugly macro for implementing the cases, sharing it with stosb.
6370 */
6371 switch (pVCpu->iem.s.enmEffOpSize)
6372 {
6373 case IEMMODE_16BIT:
6374 switch (pVCpu->iem.s.enmEffAddrMode)
6375 {
6376 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
6377 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
6378 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
6379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6380 }
6381 break;
6382
6383 case IEMMODE_32BIT:
6384 switch (pVCpu->iem.s.enmEffAddrMode)
6385 {
6386 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
6387 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
6388 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
6389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6390 }
6391 break;
6392
6393 case IEMMODE_64BIT:
6394 switch (pVCpu->iem.s.enmEffAddrMode)
6395 {
6396 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6397 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
6398 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
6399 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6400 }
6401 break;
6402 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6403 }
6404}
6405
6406#undef IEM_STOS_CASE
6407
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-REP LODS body for one operand-size/address-size combination:
 * loads ValBits from iEffSeg:[xSI] into xAX and steps xSI by ValBits/8
 * (down when EFLAGS.DF is set, up otherwise). */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6423
/**
 * @opcode 0xac
 *
 * LODSB - loads the byte at iEffSeg:[xSI] into AL and steps xSI by 1
 * (direction per EFLAGS.DF, see IEM_LODS_CASE).  REP forms defer to C
 * implementations.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6458
6459
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - loads from iEffSeg:[xSI] into AX/EAX/RAX and steps
 * xSI by the operand size (direction per EFLAGS.DF, see IEM_LODS_CASE).
 * REP forms defer to C implementations selected by operand and address
 * size.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, all cases above return via _RET macros */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6543
6544#undef IEM_LODS_CASE
6545
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-REP SCAS body for one operand-size/address-size combination:
 * compares xAX (ValBits wide) against the value at ES:[xDI] via
 * iemAImpl_cmp_uNN (flags only, xAX unchanged by CMP semantics) and steps
 * xDI by ValBits/8 (down when EFLAGS.DF is set, up otherwise). */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
6567
6568/**
6569 * @opcode 0xae
6570 */
6571FNIEMOP_DEF(iemOp_scasb_AL_Xb)
6572{
6573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6574
6575 /*
6576 * Use the C implementation if a repeat prefix is encountered.
6577 */
6578 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6579 {
6580 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
6581 switch (pVCpu->iem.s.enmEffAddrMode)
6582 {
6583 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
6584 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
6585 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
6586 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6587 }
6588 }
6589 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6590 {
6591 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
6592 switch (pVCpu->iem.s.enmEffAddrMode)
6593 {
6594 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
6595 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
6596 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
6597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6598 }
6599 }
6600 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
6601
6602 /*
6603 * Sharing case implementation with stos[wdq] below.
6604 */
6605 switch (pVCpu->iem.s.enmEffAddrMode)
6606 {
6607 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
6608 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
6609 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
6610 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6611 }
6612}
6613
6614
/**
 * @opcode 0xaf
 *
 * SCASW/SCASD/SCASQ - compares AX/EAX/RAX against the value at ES:[xDI]
 * and steps xDI by the operand size (direction per EFLAGS.DF, see
 * IEM_SCAS_CASE).  REPE/REPNE forms defer to C implementations selected
 * by operand and address size.
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, all cases above return via _RET macros */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong?  We can do 16-bit addressing in 64-bit mode, but not 32-bit, right? */
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, all cases above return via _RET macros */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6731
6732#undef IEM_SCAS_CASE
6733
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and stores it into the given byte register.
 *
 * @param   iFixedReg   The general register index (already combined with
 *                      uRexB by the callers; the U8 store accessor is
 *                      presumably what resolves high-byte vs. REX byte
 *                      registers - confirm against IEM_MC_STORE_GREG_U8).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
    IEM_MC_STORE_GREG_U8(iFixedReg, u8Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6748
6749
/**
 * @opcode 0xb0
 * MOV AL,Ib (R8B with REX.B, via uRexB).
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb1
 * MOV CL,Ib (R9B with REX.B).
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb2
 * MOV DL,Ib (R10B with REX.B).
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb3
 * MOV BL,Ib (R11B with REX.B).
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb4
 * MOV AH,Ib - register encoding 4, hence X86_GREG_xSP here; with a REX
 * prefix this encoding selects SPL/R12B instead (presumably resolved by
 * the GREG_U8 accessors).
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb5
 * MOV CH,Ib - register encoding 5 (xBP); BPL/R13B with REX.
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb6
 * MOV DH,Ib - register encoding 6 (xSI); SIL/R14B with REX.
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb7
 * MOV BH,Ib - register encoding 7 (xDI); DIL/R15B with REX.
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
6828
6829
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an immediate of the effective operand size and stores it into the
 * given general register.  Note that the 64-bit form takes a full 64-bit
 * immediate (the only instruction that does).
 *
 * @param   iFixedReg   The general register index (already combined with
 *                      uRexB by the callers).
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
            IEM_MC_STORE_GREG_U16(iFixedReg, u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
            IEM_MC_STORE_GREG_U32(iFixedReg, u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
            IEM_MC_STORE_GREG_U64(iFixedReg, u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6877
6878
/**
 * @opcode 0xb8
 * MOV rAX,Iv (rAX or R8 via REX.B).
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xb9
 * MOV rCX,Iv (rCX or R9 via REX.B).
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xba
 * MOV rDX,Iv (rDX or R10 via REX.B).
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xbb
 * MOV rBX,Iv (rBX or R11 via REX.B).
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xbc
 * MOV rSP,Iv (rSP or R12 via REX.B).
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xbd
 * MOV rBP,Iv (rBP or R13 via REX.B).
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xbe
 * MOV rSI,Iv (rSI or R14 via REX.B).
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}


/**
 * @opcode 0xbf
 * MOV rDI,Iv (rDI or R15 via REX.B).
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
6957
6958
/**
 * @opcode 0xc0
 *
 * Group 2 byte shifts/rotates with an immediate count: rol/ror/rcl/rcr/
 * shl/shr/sar Eb,Ib selected by the ModR/M reg field (/6 is invalid).
 * Requires a 186 or later.  OF and AF are declared undefined for the
 * verifier.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory - map the destination read/write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7019
7020
/**
 * @opcode 0xc1
 *
 * Group 2 rotate/shift Ev,Ib: rol/ror/rcl/rcr/shl/shr/sar of a 16/32/64-bit
 * register or memory operand by an immediate 8-bit count.  /6 is an invalid
 * encoding.  Not present on the 8086/8088, hence the 186 check.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the operation; pick the per-size
       implementation table (EFLAGS behavior is CPU-vendor dependent). */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* The shift helpers leave OF and AF in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing '1' tells the address calc that one more opcode
                   byte (the Ib immediate) follows the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7162
7163
/**
 * @opcode 0xc2
 *
 * Near return, popping Iw extra bytes off the stack after the return address.
 * Defaults to 64-bit operand size in long mode; Intel CPUs ignore the operand
 * size prefix here.  Defers to a C implementation per effective operand size.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7184
7185
/**
 * @opcode 0xc3
 *
 * Plain near return (no immediate).  Same operand-size handling as 0xC2;
 * defers to a C implementation per effective operand size.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7205
7206
/**
 * @opcode 0xc4
 *
 * Either LES Gv,Mp or the 3-byte VEX prefix, depending on mode and ModR/M.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit code. */
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* Unpack the inverted REX.R/X/B bits from the first payload byte
               and vvvv/L/pp from the second. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low five bits of the first payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7276
7277
/**
 * @opcode 0xc5
 *
 * Either LDS Gv,Mp or the 2-byte VEX prefix, depending on mode and ModR/M.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* The single payload byte carries inverted REX.R, vvvv, L and pp. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

            /* The 2-byte VEX form implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
7321
7322
/**
 * @opcode 0xc6
 *
 * Group 11: mov Eb,Ib (immediate byte store to register or memory).
 * Only /0 is a valid encoding in this group.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* One immediate byte follows the ModR/M encoding. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7356
7357
/**
 * @opcode 0xc7
 *
 * Group 11: mov Ev,Iz (immediate word/dword store; sign-extended dword for
 * 64-bit operand size).  Only /0 is a valid encoding in this group.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* No 64-bit immediate form; the 32-bit immediate is sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* 2 immediate bytes follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* 4 immediate bytes follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still 4 immediate bytes: sign-extended 32-bit immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7445
7446
7447
7448
/**
 * @opcode 0xc8
 *
 * ENTER Iw,Ib - create a stack frame of cbFrame bytes with the given nesting
 * level.  186+ instruction; defers to the C implementation.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
7462
7463
/**
 * @opcode 0xc9
 *
 * LEAVE - tear down the current stack frame (the inverse of ENTER).
 * 186+ instruction; defers to the C implementation.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
7475
7476
/**
 * @opcode 0xca
 *
 * Far return popping Iw extra bytes.  May change CPU mode, hence the
 * IEM_CIMPL_F_MODE flag on the deferred call.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH, iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
7487
7488
/**
 * @opcode 0xcb
 *
 * Plain far return - same as 0xCA with a zero byte count.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH, iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
7498
7499
/**
 * @opcode 0xcc
 *
 * INT3 - breakpoint exception (\#BP).  Shares the generic iemCImpl_int
 * implementation with 0xCD/0xCE.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
7510
7511
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an explicit vector number.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, u8Int, IEMINT_INTN);
}
7523
7524
/**
 * @opcode 0xce
 *
 * INTO - raise \#OF if the overflow flag is set.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
}
7535
7536
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return.  Can change mode and most of RFLAGS, hence the
 * full set of CIMPL flags on the deferred call.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
7547
7548
/**
 * @opcode 0xd0
 *
 * Group 2 rotate/shift Eb,1: byte operand, implicit count of 1.
 * /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the operation (see 0xC0/0xC1). */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* The shift helpers leave OF and AF in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7605
7606
7607
/**
 * @opcode 0xd1
 *
 * Group 2 rotate/shift Ev,1: 16/32/64-bit operand, implicit count of 1.
 * /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the operation (see 0xC0/0xC1). */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* The shift helpers leave OF and AF in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7741
7742
/**
 * @opcode 0xd2
 *
 * Group 2 rotate/shift Eb,CL: byte operand, count taken from the CL register.
 * /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the operation (see 0xC0/0xC1). */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* The shift helpers leave OF and AF in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7801
7802
/**
 * @opcode 0xd3
 *
 * Group 2 rotate/shift Ev,CL: 16/32/64-bit operand, count taken from CL.
 * /6 is an invalid encoding.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* The ModR/M reg field selects the operation (see 0xC0/0xC1). */
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* The shift helpers leave OF and AF in an undefined state. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7942
/**
 * @opcode 0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply; the immediate is the divisor
 * (0x0a for the documented encoding).  A zero immediate raises \#DE before
 * any state is changed.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET(); /* Divisor of zero -> #DE, like a regular division. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
}
7956
7957
/**
 * @opcode 0xd5
 *
 * AAD Ib - ASCII adjust AX before division; the immediate is the multiplier
 * (0x0a for the documented encoding).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
}
7969
7970
/**
 * @opcode 0xd6
 *
 * SALC (undocumented) - sets AL to 0xff if CF is set, otherwise to 0x00.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7989
7990
/**
 * @opcode 0xd7
 *
 * XLAT - loads AL from the byte at [rBX + zero-extended AL] in the effective
 * data segment, using the current effective address size for the addition.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* AL zero-extended to the address width. */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8039
8040
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM / \#MF as appropriate.  If either source register is empty the
 * stack-underflow path is taken for ST0 instead of calling the implementation.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* Result always lands in ST0. */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8071
8072
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW) - no register is written.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      returning the new FSW value.
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX: no destination register to stuff a QNaN into. */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8103
8104
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      returning the new FSW value.
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX: no destination register; still pop. */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8135
8136
/** Opcode 0xd8 11/0.  FADD st0,stN - st0 += stN. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8143
8144
/** Opcode 0xd8 11/1.  FMUL st0,stN - st0 *= stN. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8151
8152
/** Opcode 0xd8 11/2.  FCOM st0,stN - compare, updating only FSW. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8159
8160
/** Opcode 0xd8 11/3.  FCOMP st0,stN - compare, update FSW, then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8167
8168
/** Opcode 0xd8 11/4.  FSUB st0,stN - st0 -= stN. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8175
8176
/** Opcode 0xd8 11/5.  FSUBR st0,stN - reversed subtract: st0 = stN - st0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8183
8184
/** Opcode 0xd8 11/6.  FDIV st0,stN - st0 /= stN. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8191
8192
/** Opcode 0xd8 11/7.  FDIVR st0,stN - reversed divide: st0 = stN / st0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8199
8200
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real operand is fetched from memory before the FPU state is
 * prepared; an empty ST0 takes the stack-underflow path instead of calling
 * the implementation.
 *
 * @param   bRm         Mod R/M byte; encodes the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* Result always lands in ST0. */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8236
8237
/** Opcode 0xd8 !11/0.  FADD st0,m32r - st0 += 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8244
8245
/** Opcode 0xd8 !11/1.  FMUL st0,m32r - st0 *= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8252
8253
/** Opcode 0xd8 !11/2.
 * FCOM st0,m32r - compare ST0 with a 32-bit real from memory; only FSW is
 * updated, no register is written. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Also records FPU DP. */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8285
8286
/** Opcode 0xd8 !11/3.
 * FCOMP st0,m32r - like FCOM st0,m32r but pops the register stack after
 * updating FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8318
8319
/** Opcode 0xd8 !11/4.  FSUB st0,m32r - st0 -= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
8326
8327
/** Opcode 0xd8 !11/5.  FSUBR st0,m32r - reversed: st0 = m32r - st0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
8334
8335
/** Opcode 0xd8 !11/6.  FDIV st0,m32r - st0 /= 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
8342
8343
/** Opcode 0xd8 !11/7.  FDIVR st0,m32r - reversed: st0 = m32r / st0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8350
8351
/**
 * @opcode 0xd8
 *
 * First x87 escape byte.  Records the FPU opcode for FOP, then dispatches on
 * the ModR/M reg field: register forms (mod == 3) operate on st0,stN, memory
 * forms on st0,m32r.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7); /* FOP = low 3 opcode bits + ModR/M. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register operands: st0,stN forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operands: st0,m32r forms. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8391
8392
/** Opcode 0xd9 /0 mem32real
 * FLD m32r - converts a 32-bit real from memory to 80-bit and pushes it.
 * Pushing requires ST7 (the register about to become the new top) to be free,
 * otherwise the push-overflow path is taken.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8424
8425
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r - stores ST0 as a 32-bit real to memory.  If ST0 is empty and the
 * invalid-operation exception is masked (FCW.IM), a negative QNaN is written
 * instead; the stack-underflow condition is recorded either way. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw); /* Commit depends on FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* #IS masked: write the indefinite (negative QNaN) value. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8459
8460
/** Opcode 0xd9 !11/3
 * FSTP m32r - like FST m32r but pops the register stack after the store /
 * underflow handling. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw); /* Commit depends on FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* #IS masked: write the indefinite (negative QNaN) value. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8494
8495
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - loads the FPU environment from memory; the layout
 * (14 or 28 bytes) depends on the effective operand size.  Deferred to
 * iemCImpl_fldenv. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
8512
8513
8514/** Opcode 0xd9 !11/5 */
8515FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
8516{
8517 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
8518 IEM_MC_BEGIN(1, 1);
8519 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8520 IEM_MC_ARG(uint16_t, u16Fsw, 0);
8521 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8522 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8523 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8524 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8525 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8526 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
8527 IEM_MC_END();
8528}
8529
8530
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - stores the FPU environment to memory; the layout
 * (14 or 28 bytes) depends on the effective operand size.  Deferred to
 * iemCImpl_fnstenv. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
8547
8548
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - stores the FPU control word to memory. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8565
8566
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - FPU no-operation.  Still checks for \#NM and pending FPU exceptions
 * and updates the FPU opcode/IP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8583
8584
/** Opcode 0xd9 11/0 stN
 * FLD stN - pushes a copy of stN onto the stack; an empty source register
 * takes the push-underflow path. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value); /* Wrap the source value as a result to push. */
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8610
8611
/** Opcode 0xd9 11/3 stN
 * FXCH stN - exchanges ST0 and stN.  The underflow case (either register
 * empty) is handled out of line by iemCImpl_fxch_underflow. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t,          uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);          /* stN's value goes to ST0 (C1 set). */
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1); /* ST0's old value goes to stN. */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8641
8642
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP st0,stN - copies ST0 to stN and pops.  The stN == st0 form only
 * updates FSW and pops (the 'ffreep st0' idiom). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0); /* No flags change, just pop. */
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value); /* Wrap ST0's value for storing to stN. */
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8688
8689
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * An empty ST0 takes the stack-underflow path instead of calling the
 * implementation.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* Result replaces ST0. */
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8718
8719
/** Opcode 0xd9 0xe0.  FCHS - changes the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
8726
8727
/** Opcode 0xd9 0xe1.  FABS - clears the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
8734
8735
/** Opcode 0xd9 0xe4.
 * FTST - compares ST0 against zero, updating only FSW; an empty ST0 takes
 * the stack-underflow path. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* No destination register. */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8760
8761
/** Opcode 0xd9 0xe5.
 * FXAM - examines/classifies ST0 into the FSW condition codes.  Unlike most
 * FPU instructions it runs unconditionally: the register is referenced even
 * when empty (the implementation classifies empty too), so there is no
 * underflow branch here. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0); /* Deliberately not the NOT_EMPTY variant. */
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8783
8784
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * Pushing requires ST7 (the register about to become the new top) to be
 * free, otherwise the push-overflow path is taken.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8811
8812
/** Opcode 0xd9 0xe8.  FLD1 - pushes +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
8819
8820
/** Opcode 0xd9 0xe9.  FLDL2T - pushes log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
8827
8828
/** Opcode 0xd9 0xea.  FLDL2E - pushes log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
8835
/** Opcode 0xd9 0xeb.  FLDPI - pushes pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
8842
8843
/** Opcode 0xd9 0xec.  FLDLG2 - pushes log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
8850
/** Opcode 0xd9 0xed.  FLDLN2 - pushes ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
8857
8858
/** Opcode 0xd9 0xee.  FLDZ - pushes +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
8865
8866
/** Opcode 0xd9 0xf0.
 *
 * F2XM1 - replaces ST0 with 2^ST0 - 1.  The instruction works on values +1.0
 * thru -1.0, currently (the range on 287 & 8087 was +0.5 thru 0.0 according
 * to docs).  In addition it does appear to produce proper results for +Inf
 * and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
8880
8881
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order: STn is the first operand and the destination; ST0
 * is the second operand.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8913
8914
/** Opcode 0xd9 0xf1.
 * FYL2X: ST(1) = ST(1) * log2(ST(0)), then pop (result ends up in new ST(0)). */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
8921
8922
8923/**
8924 * Common worker for FPU instructions working on ST0 and having two outputs, one
8925 * replacing ST0 and one pushed onto the stack.
8926 *
8927 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8928 */
8929FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
8930{
8931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8932
8933 IEM_MC_BEGIN(2, 1);
8934 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
8935 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
8936 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8937
8938 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8939 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8940 IEM_MC_PREPARE_FPU_USAGE();
8941 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8942 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
8943 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
8944 } IEM_MC_ELSE() {
8945 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
8946 } IEM_MC_ENDIF();
8947 IEM_MC_ADVANCE_RIP_AND_FINISH();
8948
8949 IEM_MC_END();
8950}
8951
8952
/** Opcode 0xd9 0xf2.
 * FPTAN: replaces ST(0) with its partial tangent and pushes a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
8959
8960
/** Opcode 0xd9 0xf3.
 * FPATAN: ST(1) = arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
8967
8968
/** Opcode 0xd9 0xf4.
 * FXTRACT: splits ST(0) into exponent (replacing ST(0)) and pushed significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
8975
8976
/** Opcode 0xd9 0xf5.
 * FPREM1: IEEE partial remainder of ST(0)/ST(1), stored in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8983
8984
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrement the FPU TOP-of-stack pointer (no register data changes). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0/C2/C3 per the note above */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9006
9007
/** Opcode 0xd9 0xf7.
 * FINCSTP: increment the FPU TOP-of-stack pointer (no register data changes). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0/C2/C3 per the note above */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9029
9030
/** Opcode 0xd9 0xf8.
 * FPREM: (truncating) partial remainder of ST(0)/ST(1), stored in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
9037
9038
/** Opcode 0xd9 0xf9.
 * FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
9045
9046
/** Opcode 0xd9 0xfa.
 * FSQRT: ST(0) = sqrt(ST(0)). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9053
9054
/** Opcode 0xd9 0xfb.
 * FSINCOS: replaces ST(0) with sin(ST(0)) and pushes cos(ST(0)). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9061
9062
/** Opcode 0xd9 0xfc.
 * FRNDINT: round ST(0) to an integer per the FCW rounding control. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9069
9070
/** Opcode 0xd9 0xfd.
 * FSCALE: ST(0) = ST(0) * 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9077
9078
/** Opcode 0xd9 0xfe.
 * FSIN: ST(0) = sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9085
9086
/** Opcode 0xd9 0xff.
 * FCOS: ST(0) = cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9093
9094
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register-form opcode bytes 0xe0..0xff,
 * indexed by (bRm - 0xe0).  Unassigned encodings map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
9131
9132
9133/**
9134 * @opcode 0xd9
9135 */
9136FNIEMOP_DEF(iemOp_EscF1)
9137{
9138 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9139 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
9140
9141 if (IEM_IS_MODRM_REG_MODE(bRm))
9142 {
9143 switch (IEM_GET_MODRM_REG_8(bRm))
9144 {
9145 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
9146 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
9147 case 2:
9148 if (bRm == 0xd0)
9149 return FNIEMOP_CALL(iemOp_fnop);
9150 IEMOP_RAISE_INVALID_OPCODE_RET();
9151 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
9152 case 4:
9153 case 5:
9154 case 6:
9155 case 7:
9156 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
9157 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
9158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9159 }
9160 }
9161 else
9162 {
9163 switch (IEM_GET_MODRM_REG_8(bRm))
9164 {
9165 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
9166 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
9167 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
9168 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
9169 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
9170 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
9171 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
9172 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
9173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9174 }
9175 }
9176}
9177
9178
/** Opcode 0xda 11/0.
 * FCMOVB ST(0),ST(i): copy ST(i) to ST(0) when CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9204
9205
/** Opcode 0xda 11/1.
 * FCMOVE ST(0),ST(i): copy ST(i) to ST(0) when ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9231
9232
/** Opcode 0xda 11/2.
 * FCMOVBE ST(0),ST(i): copy ST(i) to ST(0) when CF or ZF is set (below/equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9258
9259
/** Opcode 0xda 11/3.
 * FCMOVU ST(0),ST(i): copy ST(i) to ST(0) when PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9285
9286
9287/**
9288 * Common worker for FPU instructions working on ST0 and ST1, only affecting
9289 * flags, and popping twice when done.
9290 *
9291 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9292 */
9293FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9294{
9295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9296
9297 IEM_MC_BEGIN(3, 1);
9298 IEM_MC_LOCAL(uint16_t, u16Fsw);
9299 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9300 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9301 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9302
9303 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9304 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9305
9306 IEM_MC_PREPARE_FPU_USAGE();
9307 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
9308 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9309 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
9310 } IEM_MC_ELSE() {
9311 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
9312 } IEM_MC_ENDIF();
9313 IEM_MC_ADVANCE_RIP_AND_FINISH();
9314
9315 IEM_MC_END();
9316}
9317
9318
/** Opcode 0xda 0xe9.
 * FUCOMPP: unordered compare of ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9325
9326
9327/**
9328 * Common worker for FPU instructions working on ST0 and an m32i, and storing
9329 * the result in ST0.
9330 *
9331 * @param bRm Mod R/M byte.
9332 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9333 */
9334FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
9335{
9336 IEM_MC_BEGIN(3, 3);
9337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9338 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9339 IEM_MC_LOCAL(int32_t, i32Val2);
9340 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9341 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9342 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
9343
9344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9346
9347 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9348 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9349 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9350
9351 IEM_MC_PREPARE_FPU_USAGE();
9352 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9353 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
9354 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9355 } IEM_MC_ELSE() {
9356 IEM_MC_FPU_STACK_UNDERFLOW(0);
9357 } IEM_MC_ENDIF();
9358 IEM_MC_ADVANCE_RIP_AND_FINISH();
9359
9360 IEM_MC_END();
9361}
9362
9363
/** Opcode 0xda !11/0.
 * FIADD m32int: ST(0) = ST(0) + m32int. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
9370
9371
/** Opcode 0xda !11/1.
 * FIMUL m32int: ST(0) = ST(0) * m32int. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
9378
9379
/** Opcode 0xda !11/2.
 * FICOM m32int: compare ST(0) with a 32-bit integer, setting FSW flags only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9411
9412
/** Opcode 0xda !11/3.
 * FICOMP m32int: same as FICOM m32int but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9444
9445
/** Opcode 0xda !11/4.
 * FISUB m32int: ST(0) = ST(0) - m32int. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
9452
9453
/** Opcode 0xda !11/5.
 * FISUBR m32int: ST(0) = m32int - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
9460
9461
/** Opcode 0xda !11/6.
 * FIDIV m32int: ST(0) = ST(0) / m32int. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
9468
9469
/** Opcode 0xda !11/7.
 * FIDIVR m32int: ST(0) = m32int / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
9476
9477
9478/**
9479 * @opcode 0xda
9480 */
9481FNIEMOP_DEF(iemOp_EscF2)
9482{
9483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9484 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
9485 if (IEM_IS_MODRM_REG_MODE(bRm))
9486 {
9487 switch (IEM_GET_MODRM_REG_8(bRm))
9488 {
9489 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
9490 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
9491 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
9492 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
9493 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
9494 case 5:
9495 if (bRm == 0xe9)
9496 return FNIEMOP_CALL(iemOp_fucompp);
9497 IEMOP_RAISE_INVALID_OPCODE_RET();
9498 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9499 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
9500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9501 }
9502 }
9503 else
9504 {
9505 switch (IEM_GET_MODRM_REG_8(bRm))
9506 {
9507 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
9508 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
9509 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
9510 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
9511 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
9512 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
9513 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
9514 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
9515 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9516 }
9517 }
9518}
9519
9520
/** Opcode 0xdb !11/0.
 * FILD m32int: convert a 32-bit integer and push it onto the FPU stack.
 * Pushing requires the register below TOP, i.e. ST(7), to be empty. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9551
9552
/** Opcode 0xdb !11/1.
 * FISTTP m32int: store ST(0) as a 32-bit integer with truncation, then pop.
 * On an empty ST(0) with IM masked, the integer indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9586
9587
/** Opcode 0xdb !11/2.
 * FIST m32int: store ST(0) as a 32-bit integer (FCW rounding), no pop.
 * On an empty ST(0) with IM masked, the integer indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9621
9622
/** Opcode 0xdb !11/3.
 * FISTP m32int: store ST(0) as a 32-bit integer (FCW rounding), then pop.
 * On an empty ST(0) with IM masked, the integer indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9656
9657
/** Opcode 0xdb !11/5.
 * FLD m80real: push an 80-bit real from memory onto the FPU stack.
 * Pushing requires the register below TOP, i.e. ST(7), to be empty. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9688
9689
/** Opcode 0xdb !11/7.
 * FSTP m80real: store ST(0) to memory as an 80-bit real, then pop.
 * On an empty ST(0) with IM masked, the negative QNaN (real indefinite)
 * is stored instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map with 7-byte alignment checking (10-byte operand). */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9723
9724
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i): copy ST(i) to ST(0) when CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9750
9751
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i): copy ST(i) to ST(0) when ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9777
9778
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i): copy ST(i) to ST(0) when both CF and ZF are clear
 * (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9804
9805
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i): copy ST(i) to ST(0) when PF is clear (not unordered). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid, otherwise stack underflow on ST(0). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9831
9832
/** Opcode 0xdb 0xe0.
 * FNENI: 8087 interrupt-enable instruction; treated as a no-op here
 * (still subject to the device-not-available check). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9843
9844
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087 interrupt-disable instruction; treated as a no-op here
 * (still subject to the device-not-available check). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9855
9856
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception flags in FSW without checking for
 * pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9870
9871
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU without checking for pending exceptions;
 * deferred to the C implementation (iemCImpl_finit). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
9879
9880
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287 "set protected mode"; treated as a no-op here
 * (still subject to the device-not-available check). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9891
9892
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL "reset protected mode"; raises \#UD here, matching
 * newer CPUs (the no-op variant is compiled out below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
9908
9909
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i): unordered compare setting EFLAGS; no pop.
 * Deferred to the common C implementation shared with FCOMI. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
9918
9919
9920/** Opcode 0xdb 11/6. */
9921FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
9922{
9923 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
9924 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
9925 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
9926 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
9927}
9928
9929
9930/**
9931 * @opcode 0xdb
9932 */
9933FNIEMOP_DEF(iemOp_EscF3)
9934{
9935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9936 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
9937 if (IEM_IS_MODRM_REG_MODE(bRm))
9938 {
9939 switch (IEM_GET_MODRM_REG_8(bRm))
9940 {
9941 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
9942 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
9943 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
9944 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
9945 case 4:
9946 switch (bRm)
9947 {
9948 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
9949 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
9950 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
9951 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
9952 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
9953 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
9954 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
9955 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
9956 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9957 }
9958 break;
9959 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
9960 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
9961 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
9962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9963 }
9964 }
9965 else
9966 {
9967 switch (IEM_GET_MODRM_REG_8(bRm))
9968 {
9969 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9970 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9971 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9972 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9973 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
9974 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9975 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9976 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9977 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9978 }
9979 }
9980}
9981
9982
9983/**
9984 * Common worker for FPU instructions working on STn and ST0, and storing the
9985 * result in STn unless IE, DE or ZE was raised.
9986 *
9987 * @param bRm Mod R/M byte.
9988 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9989 */
9990FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9991{
9992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9993
9994 IEM_MC_BEGIN(3, 1);
9995 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9996 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9997 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9998 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9999
10000 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10001 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10002
10003 IEM_MC_PREPARE_FPU_USAGE();
10004 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10005 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10006 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
10007 } IEM_MC_ELSE() {
10008 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
10009 } IEM_MC_ENDIF();
10010 IEM_MC_ADVANCE_RIP_AND_FINISH();
10011
10012 IEM_MC_END();
10013}
10014
10015
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    /* FADD st(i),st0 - result stored in ST(i) by the shared worker. */
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
10022
10023
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    /* FMUL st(i),st0 - result stored in ST(i) by the shared worker. */
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
10030
10031
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    /* FSUBR st(i),st0 (reverse subtract) - result stored in ST(i). */
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
10038
10039
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    /* FSUB st(i),st0 - result stored in ST(i) by the shared worker. */
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10046
10047
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    /* FDIVR st(i),st0 (reverse divide) - result stored in ST(i). */
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10054
10055
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    /* FDIV st(i),st0 - result stored in ST(i) by the shared worker. */
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10062
10063
10064/**
10065 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10066 * memory operand, and storing the result in ST0.
10067 *
10068 * @param bRm Mod R/M byte.
10069 * @param pfnImpl Pointer to the instruction implementation (assembly).
10070 */
10071FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10072{
10073 IEM_MC_BEGIN(3, 3);
10074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10075 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10076 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10077 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10078 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10079 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10080
10081 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10083 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10084 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10085
10086 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10087 IEM_MC_PREPARE_FPU_USAGE();
10088 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10089 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10090 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10091 } IEM_MC_ELSE() {
10092 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10093 } IEM_MC_ENDIF();
10094 IEM_MC_ADVANCE_RIP_AND_FINISH();
10095
10096 IEM_MC_END();
10097}
10098
10099
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    /* FADD m64real - result stored in ST(0) by the shared m64r worker. */
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10106
10107
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    /* FMUL m64real - result stored in ST(0) by the shared m64r worker. */
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10114
10115
/** Opcode 0xdc !11/2.
 * FCOM st0,m64real: compare ST(0) with a 64-bit memory operand; only FSW is
 * updated, nothing is stored to the stack. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Empty ST(0) means stack underflow; UINT8_MAX = no register to update. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10147
10148
/** Opcode 0xdc !11/3.
 * FCOMP st0,m64real: like FCOM m64r but pops ST(0) afterwards (note the
 * _THEN_POP variants of the FSW update / underflow macros). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10180
10181
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    /* FSUB m64real - result stored in ST(0) by the shared m64r worker. */
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10188
10189
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    /* FSUBR m64real (reverse subtract) - result stored in ST(0). */
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10196
10197
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    /* FDIV m64real - result stored in ST(0) by the shared m64r worker. */
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10204
10205
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    /* FDIVR m64real (reverse divide) - result stored in ST(0). */
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10212
10213
10214/**
10215 * @opcode 0xdc
10216 */
10217FNIEMOP_DEF(iemOp_EscF4)
10218{
10219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10220 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10221 if (IEM_IS_MODRM_REG_MODE(bRm))
10222 {
10223 switch (IEM_GET_MODRM_REG_8(bRm))
10224 {
10225 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10226 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10227 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10228 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10229 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10230 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10231 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10232 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10234 }
10235 }
10236 else
10237 {
10238 switch (IEM_GET_MODRM_REG_8(bRm))
10239 {
10240 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10241 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10242 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10243 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10244 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10245 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10246 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10247 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10248 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10249 }
10250 }
10251}
10252
10253
/** Opcode 0xdd !11/0.
 * FLD m64real: convert a 64-bit real from memory to 80-bit and push it.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (the one below TOP) must be free for the push; otherwise
       this is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10284
10285
/** Opcode 0xdd !11/1.
 * FISTTP m64int: store ST(0) to memory as a 64-bit integer using truncation,
 * then pop.  On masked-IE underflow the integer indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int64_t *,               pi64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: store integer indefinite only if IE is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10319
10320
/** Opcode 0xdd !11/2.
 * FST m64real: store ST(0) to memory as a 64-bit real (no pop).  On
 * masked-IE underflow a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: store default NaN only if IE is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10354
10355
10356
10357
/** Opcode 0xdd !11/3.
 * FSTP m64real: like FST m64r but pops ST(0) afterwards (note the _THEN_POP
 * variants of the FSW update / underflow macros). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: store default NaN only if IE is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10391
10392
/** Opcode 0xdd !11/4.
 * FRSTOR: restore the full FPU state from a 94/108-byte memory image; the
 * heavy lifting is deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10409
10410
/** Opcode 0xdd !11/6.
 * FNSAVE: save the full FPU state to a 94/108-byte memory image; deferred to
 * the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10427
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to a 16-bit memory location. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    /* Read-only access: FSW is fetched and written to guest memory. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
10451
10452
/** Opcode 0xdd 11/0.
 * FFREE st(i): mark the register's tag as empty without changing TOP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(); /* keep FOP/FIP current even though no arithmetic is done */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10473
10474
/** Opcode 0xdd 11/2.
 * FST st(i): copy ST(0) into ST(i); stack underflow if ST(0) is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST(0) value in a result with a zero FSW delta and store. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10498
10499
/** Opcode 0xdd 11/4.
 * FUCOM st0,st(i): unordered compare, FSW-only (no store, no pop). */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
10506
10507
/** Opcode 0xdd 11/5.
 * FUCOMP st0,st(i): as FUCOM but pops ST(0) (uses the _pop worker). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
10514
10515
10516/**
10517 * @opcode 0xdd
10518 */
10519FNIEMOP_DEF(iemOp_EscF5)
10520{
10521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10522 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
10523 if (IEM_IS_MODRM_REG_MODE(bRm))
10524 {
10525 switch (IEM_GET_MODRM_REG_8(bRm))
10526 {
10527 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
10528 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
10529 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
10530 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
10531 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
10532 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
10533 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10534 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10536 }
10537 }
10538 else
10539 {
10540 switch (IEM_GET_MODRM_REG_8(bRm))
10541 {
10542 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
10543 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
10544 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
10545 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
10546 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
10547 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
10548 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
10549 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
10550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10551 }
10552 }
10553}
10554
10555
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    /* FADDP st(i),st0 - uses the popping variant of the stN,st0 worker. */
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
10562
10563
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    /* FMULP st(i),st0 - uses the popping variant of the stN,st0 worker. */
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
10570
10571
/** Opcode 0xde 0xd9. */
FNIEMOP_DEF(iemOp_fcompp)
{
    /* FCOMPP: compare ST(0) with ST(1), then pop both (pop_pop worker). */
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
10578
10579
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    /* FSUBRP st(i),st0 - uses the popping variant of the stN,st0 worker. */
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
10586
10587
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    /* FSUBP st(i),st0 - uses the popping variant of the stN,st0 worker. */
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
10594
10595
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    /* FDIVRP st(i),st0 - uses the popping variant of the stN,st0 worker. */
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
10602
10603
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    /* FDIVP st(i),st0 - uses the popping variant of the stN,st0 worker. */
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
10610
10611
10612/**
10613 * Common worker for FPU instructions working on ST0 and an m16i, and storing
10614 * the result in ST0.
10615 *
10616 * @param bRm Mod R/M byte.
10617 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10618 */
10619FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
10620{
10621 IEM_MC_BEGIN(3, 3);
10622 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10623 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10624 IEM_MC_LOCAL(int16_t, i16Val2);
10625 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10626 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10627 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
10628
10629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10631
10632 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10633 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10634 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10635
10636 IEM_MC_PREPARE_FPU_USAGE();
10637 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10638 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
10639 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
10640 } IEM_MC_ELSE() {
10641 IEM_MC_FPU_STACK_UNDERFLOW(0);
10642 } IEM_MC_ENDIF();
10643 IEM_MC_ADVANCE_RIP_AND_FINISH();
10644
10645 IEM_MC_END();
10646}
10647
10648
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    /* FIADD m16int - result stored in ST(0) by the shared m16i worker. */
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
10655
10656
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    /* FIMUL m16int - result stored in ST(0) by the shared m16i worker. */
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
10663
10664
/** Opcode 0xde !11/2.
 * FICOM st0,m16int: compare ST(0) with a 16-bit integer memory operand; only
 * FSW is updated, nothing is stored. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Empty ST(0) means stack underflow; UINT8_MAX = no register to update. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10696
10697
/** Opcode 0xde !11/3.
 * FICOMP st0,m16int: like FICOM m16i but pops ST(0) afterwards (note the
 * _THEN_POP variants of the FSW update / underflow macros). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,                  u16Fsw);
    IEM_MC_LOCAL(int16_t,                   i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,        pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,                pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val2,   i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10729
10730
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    /* FISUB m16int - result stored in ST(0) by the shared m16i worker. */
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
10737
10738
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    /* FISUBR m16int (reverse subtract) - result stored in ST(0). */
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
10745
10746
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    /* FIDIV m16int - result stored in ST(0) by the shared m16i worker. */
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
10753
10754
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    /* FIDIVR m16int (reverse divide) - result stored in ST(0). */
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
10761
10762
10763/**
10764 * @opcode 0xde
10765 */
10766FNIEMOP_DEF(iemOp_EscF6)
10767{
10768 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10769 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
10770 if (IEM_IS_MODRM_REG_MODE(bRm))
10771 {
10772 switch (IEM_GET_MODRM_REG_8(bRm))
10773 {
10774 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
10775 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
10776 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10777 case 3: if (bRm == 0xd9)
10778 return FNIEMOP_CALL(iemOp_fcompp);
10779 IEMOP_RAISE_INVALID_OPCODE_RET();
10780 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
10781 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
10782 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
10783 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
10784 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10785 }
10786 }
10787 else
10788 {
10789 switch (IEM_GET_MODRM_REG_8(bRm))
10790 {
10791 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
10792 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
10793 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
10794 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
10795 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
10796 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
10797 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
10798 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
10799 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10800 }
10801 }
10802}
10803
10804
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: frees the
 * register's tag and then increments TOP. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();     /* this is the 'p' (pop) part */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10825
10826
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word into AX (no-wait form, so no
 * pending-FPU-exception check, only \#NM). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();      /* read-only access, FSW is not modified */
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10842
10843
/** Opcode 0xdf 11/5.
 * FUCOMIP ST0,ST(i) - compare and set EFLAGS, then pop.  Deferred to the
 * common fcomi/fucomi C implementation; bit 31 of the last argument carries
 * the pop flag, the low bits the FPU opcode. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10852
10853
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - compare and set EFLAGS, then pop.  Uses the same
 * assembly worker as fucomip; bit 31 of the last argument is the pop flag,
 * the low bits carry the FPU opcode. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10862
10863
/** Opcode 0xdf !11/0.
 * FILD m16i - load a signed 16-bit integer from memory, convert it to an
 * 80-bit float and push it onto the FPU stack.  Pushes a stack-overflow
 * response instead if the incoming top slot (ST(7) relative) is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int16_t,                 i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes,     0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val,    i16Val,     1);

    /* Effective address must be decoded before the decoding-done marker. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {          /* slot that will become the new ST(0) */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10894
10895
/** Opcode 0xdf !11/1.
 * FISTTP m16i (SSE3) - store ST(0) to memory as a signed 16-bit integer with
 * truncation, then pop.  On stack underflow the integer-indefinite value is
 * stored if the invalid-operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,                1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,              2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before looking at the stack so access faults are raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Underflow: store integer indefinite only when IM is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10929
10930
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST(0) to memory as a signed 16-bit integer (rounded per
 * FCW.RC) without popping.  On stack underflow the integer-indefinite value
 * is stored if the invalid-operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,                1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,              2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before looking at the stack so access faults are raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst); /* no pop */
    } IEM_MC_ELSE() {
        /* Underflow: store integer indefinite only when IM is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10964
10965
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) to memory as a signed 16-bit integer (rounded per
 * FCW.RC), then pop.  Identical to FIST m16i except for the pop.  On stack
 * underflow the integer-indefinite value is stored if FCW.IM is set. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(int16_t *,                 pi16Dst,                1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,              2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before looking at the stack so access faults are raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Underflow: store integer indefinite only when IM is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10999
11000
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value from memory, convert it to
 * an 80-bit float and push it onto the FPU stack.  Pushes a stack-overflow
 * response if the incoming top slot is occupied. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,               d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,     pd80Val,    d80Val, 1);

    /* Effective address must be decoded before the decoding-done marker. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {          /* slot that will become the new ST(0) */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11031
11032
/** Opcode 0xdf !11/5.
 * FILD m64i - load a signed 64-bit integer from memory, convert it to an
 * 80-bit float and push it onto the FPU stack.  Pushes a stack-overflow
 * response if the incoming top slot is occupied. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(int64_t,                 i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,    FpuRes,     0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val,    i64Val,     1);

    /* Effective address must be decoded before the decoding-done marker. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {          /* slot that will become the new ST(0) */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11063
11064
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) to memory as an 80-bit packed BCD value, then
 * pop.  On stack underflow the BCD indefinite value is stored if FCW.IM is
 * set.  Note the explicit 7-byte alignment restriction on the mapping. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PRTPBCD80U,                pd80Dst,                1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,              2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before looking at the stack so access faults are raised first. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Underflow: store BCD indefinite only when IM is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11098
11099
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) to memory as a signed 64-bit integer (rounded per
 * FCW.RC), then pop.  On stack underflow the integer-indefinite value is
 * stored if the invalid-operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,                u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,      pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(int64_t *,                 pi64Dst,                1);
    IEM_MC_ARG(PCRTFLOAT80U,              pr80Value,              2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before looking at the stack so access faults are raised first. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Underflow: store integer indefinite only when IM is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11133
11134
11135/**
11136 * @opcode 0xdf
11137 */
11138FNIEMOP_DEF(iemOp_EscF7)
11139{
11140 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11141 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
11142 if (IEM_IS_MODRM_REG_MODE(bRm))
11143 {
11144 switch (IEM_GET_MODRM_REG_8(bRm))
11145 {
11146 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
11147 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
11148 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11149 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
11150 case 4: if (bRm == 0xe0)
11151 return FNIEMOP_CALL(iemOp_fnstsw_ax);
11152 IEMOP_RAISE_INVALID_OPCODE_RET();
11153 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
11154 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
11155 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11157 }
11158 }
11159 else
11160 {
11161 switch (IEM_GET_MODRM_REG_8(bRm))
11162 {
11163 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
11164 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
11165 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
11166 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
11167 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
11168 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
11169 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
11170 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
11171 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11172 }
11173 }
11174}
11175
11176
11177/**
11178 * @opcode 0xe0
11179 */
11180FNIEMOP_DEF(iemOp_loopne_Jb)
11181{
11182 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
11183 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11185 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11186
11187 switch (pVCpu->iem.s.enmEffAddrMode)
11188 {
11189 case IEMMODE_16BIT:
11190 IEM_MC_BEGIN(0,0);
11191 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11192 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11193 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11194 } IEM_MC_ELSE() {
11195 IEM_MC_ADVANCE_RIP_AND_FINISH();
11196 } IEM_MC_ENDIF();
11197 IEM_MC_END();
11198 break;
11199
11200 case IEMMODE_32BIT:
11201 IEM_MC_BEGIN(0,0);
11202 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11203 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11204 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11205 } IEM_MC_ELSE() {
11206 IEM_MC_ADVANCE_RIP_AND_FINISH();
11207 } IEM_MC_ENDIF();
11208 IEM_MC_END();
11209 break;
11210
11211 case IEMMODE_64BIT:
11212 IEM_MC_BEGIN(0,0);
11213 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11214 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
11215 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11216 } IEM_MC_ELSE() {
11217 IEM_MC_ADVANCE_RIP_AND_FINISH();
11218 } IEM_MC_ENDIF();
11219 IEM_MC_END();
11220 break;
11221
11222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11223 }
11224}
11225
11226
11227/**
11228 * @opcode 0xe1
11229 */
11230FNIEMOP_DEF(iemOp_loope_Jb)
11231{
11232 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
11233 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11235 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11236
11237 switch (pVCpu->iem.s.enmEffAddrMode)
11238 {
11239 case IEMMODE_16BIT:
11240 IEM_MC_BEGIN(0,0);
11241 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11242 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11243 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11244 } IEM_MC_ELSE() {
11245 IEM_MC_ADVANCE_RIP_AND_FINISH();
11246 } IEM_MC_ENDIF();
11247 IEM_MC_END();
11248 break;
11249
11250 case IEMMODE_32BIT:
11251 IEM_MC_BEGIN(0,0);
11252 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11253 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11254 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11255 } IEM_MC_ELSE() {
11256 IEM_MC_ADVANCE_RIP_AND_FINISH();
11257 } IEM_MC_ENDIF();
11258 IEM_MC_END();
11259 break;
11260
11261 case IEMMODE_64BIT:
11262 IEM_MC_BEGIN(0,0);
11263 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11264 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
11265 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11266 } IEM_MC_ELSE() {
11267 IEM_MC_ADVANCE_RIP_AND_FINISH();
11268 } IEM_MC_ENDIF();
11269 IEM_MC_END();
11270 break;
11271
11272 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11273 }
11274}
11275
11276
11277/**
11278 * @opcode 0xe2
11279 */
11280FNIEMOP_DEF(iemOp_loop_Jb)
11281{
11282 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
11283 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11285 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11286
11287 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
11288 * using the 32-bit operand size override. How can that be restarted? See
11289 * weird pseudo code in intel manual. */
11290
11291 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
11292 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
11293 * the loop causes guest crashes, but when logging it's nice to skip a few million
11294 * lines of useless output. */
11295#if defined(LOG_ENABLED)
11296 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
11297 switch (pVCpu->iem.s.enmEffAddrMode)
11298 {
11299 case IEMMODE_16BIT:
11300 IEM_MC_BEGIN(0,0);
11301 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
11302 IEM_MC_ADVANCE_RIP_AND_FINISH();
11303 IEM_MC_END();
11304 break;
11305
11306 case IEMMODE_32BIT:
11307 IEM_MC_BEGIN(0,0);
11308 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
11309 IEM_MC_ADVANCE_RIP_AND_FINISH();
11310 IEM_MC_END();
11311 break;
11312
11313 case IEMMODE_64BIT:
11314 IEM_MC_BEGIN(0,0);
11315 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
11316 IEM_MC_ADVANCE_RIP_AND_FINISH();
11317 IEM_MC_END();
11318 break;
11319
11320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11321 }
11322#endif
11323
11324 switch (pVCpu->iem.s.enmEffAddrMode)
11325 {
11326 case IEMMODE_16BIT:
11327 IEM_MC_BEGIN(0,0);
11328
11329 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
11330 IEM_MC_IF_CX_IS_NZ() {
11331 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11332 } IEM_MC_ELSE() {
11333 IEM_MC_ADVANCE_RIP_AND_FINISH();
11334 } IEM_MC_ENDIF();
11335 IEM_MC_END();
11336 break;
11337
11338 case IEMMODE_32BIT:
11339 IEM_MC_BEGIN(0,0);
11340 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
11341 IEM_MC_IF_ECX_IS_NZ() {
11342 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11343 } IEM_MC_ELSE() {
11344 IEM_MC_ADVANCE_RIP_AND_FINISH();
11345 } IEM_MC_ENDIF();
11346 IEM_MC_END();
11347 break;
11348
11349 case IEMMODE_64BIT:
11350 IEM_MC_BEGIN(0,0);
11351 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
11352 IEM_MC_IF_RCX_IS_NZ() {
11353 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11354 } IEM_MC_ELSE() {
11355 IEM_MC_ADVANCE_RIP_AND_FINISH();
11356 } IEM_MC_ENDIF();
11357 IEM_MC_END();
11358 break;
11359
11360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11361 }
11362}
11363
11364
11365/**
11366 * @opcode 0xe3
11367 */
11368FNIEMOP_DEF(iemOp_jecxz_Jb)
11369{
11370 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
11371 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11373 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11374
11375 switch (pVCpu->iem.s.enmEffAddrMode)
11376 {
11377 case IEMMODE_16BIT:
11378 IEM_MC_BEGIN(0,0);
11379 IEM_MC_IF_CX_IS_NZ() {
11380 IEM_MC_ADVANCE_RIP_AND_FINISH();
11381 } IEM_MC_ELSE() {
11382 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11383 } IEM_MC_ENDIF();
11384 IEM_MC_END();
11385 break;
11386
11387 case IEMMODE_32BIT:
11388 IEM_MC_BEGIN(0,0);
11389 IEM_MC_IF_ECX_IS_NZ() {
11390 IEM_MC_ADVANCE_RIP_AND_FINISH();
11391 } IEM_MC_ELSE() {
11392 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11393 } IEM_MC_ENDIF();
11394 IEM_MC_END();
11395 break;
11396
11397 case IEMMODE_64BIT:
11398 IEM_MC_BEGIN(0,0);
11399 IEM_MC_IF_RCX_IS_NZ() {
11400 IEM_MC_ADVANCE_RIP_AND_FINISH();
11401 } IEM_MC_ELSE() {
11402 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11403 } IEM_MC_ENDIF();
11404 IEM_MC_END();
11405 break;
11406
11407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11408 }
11409}
11410
11411
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the immediate I/O port into AL.  Deferred to
 * the C implementation; 0x80 flags the port as an immediate, the low bits
 * carry the effective address mode. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11420
11421
/** Opcode 0xe5.
 * IN eAX,Ib - read a word/dword (per effective operand size) from the
 * immediate I/O port into AX/EAX.  Deferred to the C implementation; 0x80
 * flags the port as an immediate. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11431
11432
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate I/O port.  Deferred to the C
 * implementation; 0x80 flags the port as an immediate, the low bits carry
 * the effective address mode. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11441
11442
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX (per effective operand size) to the immediate
 * I/O port.  Deferred to the C implementation; 0x80 flags the port as an
 * immediate. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11452
11453
11454/**
11455 * @opcode 0xe8
11456 */
11457FNIEMOP_DEF(iemOp_call_Jv)
11458{
11459 IEMOP_MNEMONIC(call_Jv, "call Jv");
11460 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11461 switch (pVCpu->iem.s.enmEffOpSize)
11462 {
11463 case IEMMODE_16BIT:
11464 {
11465 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
11466 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_16, (int16_t)u16Imm);
11467 }
11468
11469 case IEMMODE_32BIT:
11470 {
11471 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11472 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_32, (int32_t)u32Imm);
11473 }
11474
11475 case IEMMODE_64BIT:
11476 {
11477 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
11478 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_64, u64Imm);
11479 }
11480
11481 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11482 }
11483}
11484
11485
11486/**
11487 * @opcode 0xe9
11488 */
11489FNIEMOP_DEF(iemOp_jmp_Jv)
11490{
11491 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
11492 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11493 switch (pVCpu->iem.s.enmEffOpSize)
11494 {
11495 case IEMMODE_16BIT:
11496 {
11497 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
11498 IEM_MC_BEGIN(0, 0);
11499 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
11500 IEM_MC_END();
11501 break;
11502 }
11503
11504 case IEMMODE_64BIT:
11505 case IEMMODE_32BIT:
11506 {
11507 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
11508 IEM_MC_BEGIN(0, 0);
11509 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
11510 IEM_MC_END();
11511 break;
11512 }
11513
11514 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11515 }
11516}
11517
11518
11519/**
11520 * @opcode 0xea
11521 */
11522FNIEMOP_DEF(iemOp_jmp_Ap)
11523{
11524 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
11525 IEMOP_HLP_NO_64BIT();
11526
11527 /* Decode the far pointer address and pass it on to the far call C implementation. */
11528 uint32_t off32Seg;
11529 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
11530 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
11531 else
11532 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
11533 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
11534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11535 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
11536 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
11537}
11538
11539
11540/**
11541 * @opcode 0xeb
11542 */
11543FNIEMOP_DEF(iemOp_jmp_Jb)
11544{
11545 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
11546 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
11547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11548 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
11549
11550 IEM_MC_BEGIN(0, 0);
11551 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
11552 IEM_MC_END();
11553}
11554
11555
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL.  Deferred to the
 * C implementation. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11563
11564
/** Opcode 0xed.
 * IN eAX,DX - read a word/dword (per effective operand size) from the I/O
 * port in DX into AX/EAX.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11574
11575
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port in DX.  Deferred to the C
 * implementation. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11583
11584
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the I/O port in
 * DX.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11594
11595
11596/**
11597 * @opcode 0xf0
11598 */
11599FNIEMOP_DEF(iemOp_lock)
11600{
11601 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
11602 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
11603 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
11604
11605 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
11606 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
11607}
11608
11609
11610/**
11611 * @opcode 0xf1
11612 */
11613FNIEMOP_DEF(iemOp_int1)
11614{
11615 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
11616 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
11617 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
11618 * LOADALL memo. Needs some testing. */
11619 IEMOP_HLP_MIN_386();
11620 /** @todo testcase! */
11621 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
11622 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
11623}
11624
11625
11626/**
11627 * @opcode 0xf2
11628 */
11629FNIEMOP_DEF(iemOp_repne)
11630{
11631 /* This overrides any previous REPE prefix. */
11632 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
11633 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
11634 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
11635
11636 /* For the 4 entry opcode tables, REPNZ overrides any previous
11637 REPZ and operand size prefixes. */
11638 pVCpu->iem.s.idxPrefix = 3;
11639
11640 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
11641 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
11642}
11643
11644
11645/**
11646 * @opcode 0xf3
11647 */
11648FNIEMOP_DEF(iemOp_repe)
11649{
11650 /* This overrides any previous REPNE prefix. */
11651 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
11652 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
11653 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
11654
11655 /* For the 4 entry opcode tables, REPNZ overrides any previous
11656 REPNZ and operand size prefixes. */
11657 pVCpu->iem.s.idxPrefix = 2;
11658
11659 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
11660 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
11661}
11662
11663
11664/**
11665 * @opcode 0xf4
11666 */
11667FNIEMOP_DEF(iemOp_hlt)
11668{
11669 IEMOP_MNEMONIC(hlt, "hlt");
11670 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11671 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
11672}
11673
11674
11675/**
11676 * @opcode 0xf5
11677 */
11678FNIEMOP_DEF(iemOp_cmc)
11679{
11680 IEMOP_MNEMONIC(cmc, "cmc");
11681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11682 IEM_MC_BEGIN(0, 0);
11683 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
11684 IEM_MC_ADVANCE_RIP_AND_FINISH();
11685 IEM_MC_END();
11686}
11687
11688
11689/**
11690 * Body for of 'inc/dec/not/neg Eb'.
11691 */
11692#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
11693 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
11694 { \
11695 /* register access */ \
11696 IEMOP_HLP_DONE_DECODING(); \
11697 IEM_MC_BEGIN(2, 0); \
11698 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11699 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
11700 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
11701 IEM_MC_REF_EFLAGS(pEFlags); \
11702 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
11703 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11704 IEM_MC_END(); \
11705 } \
11706 else \
11707 { \
11708 /* memory access. */ \
11709 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
11710 { \
11711 IEM_MC_BEGIN(2, 2); \
11712 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11713 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
11714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11715 \
11716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
11717 IEMOP_HLP_DONE_DECODING(); \
11718 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
11719 IEM_MC_FETCH_EFLAGS(EFlags); \
11720 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
11721 \
11722 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
11723 IEM_MC_COMMIT_EFLAGS(EFlags); \
11724 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11725 IEM_MC_END(); \
11726 } \
11727 else \
11728 { \
11729 IEM_MC_BEGIN(2, 2); \
11730 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
11731 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
11732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11733 \
11734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
11735 IEMOP_HLP_DONE_DECODING(); \
11736 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
11737 IEM_MC_FETCH_EFLAGS(EFlags); \
11738 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
11739 \
11740 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
11741 IEM_MC_COMMIT_EFLAGS(EFlags); \
11742 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11743 IEM_MC_END(); \
11744 } \
11745 } \
11746 (void)0
11747
11748
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Emits the register-target path and the non-LOCKed memory-target path for
 * all three effective operand sizes.  NOTE: when a LOCK prefix is present on
 * a memory target, control falls into the 'else' scope this macro leaves
 * OPEN at the bottom; the companion IEMOP_BODY_UNARY_Ev_LOCKED macro must
 * follow directly in the function body to supply the locked workers and
 * close the scopes.
 *
 * @param a_fnNormalU16 The normal (non-locked) 16-bit worker, (pu16Dst, pEFlags).
 * @param a_fnNormalU32 The normal (non-locked) 32-bit worker, (pu32Dst, pEFlags).
 * @param a_fnNormalU64 The normal (non-locked) 64-bit worker, (pu64Dst, pEFlags).
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint16_t *,  pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint32_t *,  pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                /* 32-bit GPR writes zero the high dword of the 64-bit register. */ \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint64_t *,  pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* Deliberately left open: completed by IEMOP_BODY_UNARY_Ev_LOCKED. */ \
            (void)0
11864
/**
 * Body for the LOCK prefixed memory-target case of 'inc/dec/not/neg Ev'.
 *
 * Must directly follow IEMOP_BODY_UNARY_Ev in the function body: it lives
 * inside the 'else' scope that macro leaves open and closes both that scope
 * and the outer one, calling the locked (interlocked) workers instead of the
 * normal ones.
 *
 * @param a_fnLockedU16 The locked 16-bit worker, (pu16Dst, pEFlags).
 * @param a_fnLockedU32 The locked 32-bit worker, (pu32Dst, pEFlags).
 * @param a_fnLockedU64 The locked 64-bit worker, (pu64Dst, pEFlags).
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
11924
11925
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * 'test Eb,Ib' - ANDs the r/m8 destination with an imm8 and updates EFLAGS
 * without writing the result back.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG(uint8_t,         u8Src,              1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* cbImm=1: a one byte immediate trails the ModR/M bytes, needed for
           correct RIP-relative displacement calculation. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* TEST only reads the destination, hence the read-only mapping. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11975
11976
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common body for 'mul/imul/div/idiv Eb': the byte operand is combined with
 * AX via the @a pfnU8 worker.  A non-zero status from the worker raises
 * \#DE (divide error).
 *
 * @param bRm   The ModR/M byte.
 * @param pfnU8 The 8-bit multiply/divide worker, (pu16AX, u8Value, pEFlags).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12027
12028
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common body for 'mul/imul/div/idiv Ev': the operand is combined with the
 * (E/R)DX:(E/R)AX register pair via the size-specific worker from @a pImpl.
 * A non-zero status from the worker raises \#DE (divide error).
 *
 * @param bRm   The ModR/M byte.
 * @param pImpl Table with the pfnU16/pfnU32/pfnU64 workers to use.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        /* NOTE(review): this is repeated inside each case below; redundant
           but harmless - candidate for cleanup. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero the high dwords of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero the high dwords of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12212
12213
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * 'not Eb' - one's complement of the byte r/m operand (EFLAGS untouched by
 * the worker).
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
12223
12224
12225/**
12226 * @opmaps grp3_f6
12227 * @opcode /3
12228 */
12229FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12230{
12231 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12232 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12233}
12234
12235
/**
 * @opcode 0xf6
 *
 * Group 3 byte-operand dispatcher; the ModR/M reg field selects the actual
 * instruction.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        /* /1 is an undocumented alias of /0 (test). */
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb,  bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb,  bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12267
12268
/** Opcode 0xf7 /0.
 *
 * 'test Ev,Iv' - ANDs the r/m destination with an immediate and updates
 * EFLAGS without writing the result back.  The 64-bit form takes a
 * sign-extended imm32.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                /* imm32 sign-extended to 64 bits, per the usual 64-bit immediate rules. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access.  The cbImm argument to IEM_MC_CALC_RM_EFF_ADDR is
           the size of the trailing immediate (2 or 4 bytes) so RIP-relative
           displacements are computed correctly.  TEST only reads the
           destination, hence the read-only mappings. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG(uint16_t,        u16Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG(uint32_t,        u32Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG(uint64_t,        u64Src,             1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12408
12409
/** Opcode 0xf7 /2.
 *
 * 'not Ev' - one's complement of the r/m operand.  The two body macros pair
 * up: the first handles register and non-locked memory targets, the second
 * completes the LOCK prefixed memory-target path.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
12417
12418
/** Opcode 0xf7 /3.
 *
 * 'neg Ev' - two's complement negation of the r/m operand.  The two body
 * macros pair up: the first handles register and non-locked memory targets,
 * the second completes the LOCK prefixed memory-target path.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
12426
12427
/**
 * @opcode 0xf7
 *
 * Group 3 word/dword/qword-operand dispatcher; the ModR/M reg field selects
 * the actual instruction.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        /* /1 is an undocumented alias of /0 (test). */
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev,  bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev,  bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12459
12460
/**
 * @opcode 0xf8
 *
 * 'clc' - clears EFLAGS.CF; no other flags are affected.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12473
12474
/**
 * @opcode 0xf9
 *
 * 'stc' - sets EFLAGS.CF; no other flags are affected.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12487
12488
/**
 * @opcode 0xfa
 *
 * 'cli' - deferred to a C implementation since it involves privilege (IOPL)
 * checks and can cause a VM exit.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_cli);
}
12498
12499
/**
 * @opcode 0xfb
 *
 * 'sti' - deferred to a C implementation since it involves privilege (IOPL)
 * checks and can cause a VM exit.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
}
12506
12507
/**
 * @opcode 0xfc
 *
 * 'cld' - clears EFLAGS.DF; no other flags are affected.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12520
12521
/**
 * @opcode 0xfd
 *
 * 'std' - sets EFLAGS.DF; no other flags are affected.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12534
12535
/**
 * @opmaps grp4
 * @opcode /0
 *
 * 'inc Eb' - byte increment; supports the LOCK prefix on memory targets via
 * the locked worker.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
12545
12546
/**
 * @opmaps grp4
 * @opcode /1
 *
 * 'dec Eb' - byte decrement; supports the LOCK prefix on memory targets via
 * the locked worker.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
12556
12557
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (inc Eb) and /1 (dec Eb) are defined; all
 * other reg-field values raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
12574
/** Opcode 0xff /0.
 *
 * 'inc Ev' - word/dword/qword increment.  The two body macros pair up: the
 * first handles register and non-locked memory targets, the second completes
 * the LOCK prefixed memory-target path.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
12582
12583
/** Opcode 0xff /1.
 *
 * 'dec Ev' - word/dword/qword decrement.  The two body macros pair up: the
 * first handles register and non-locked memory targets, the second completes
 * the LOCK prefixed memory-target path.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
12591
12592
/**
 * Opcode 0xff /2.
 *
 * 'call Ev' - near indirect call; the target RIP comes from a register or
 * from memory.  In 64-bit mode the operand size defaults to 64-bit (Intel
 * ignores the operand-size prefix).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12677
/**
 * Body for far call/jmp thru a memory far pointer (grp5 'Ep' operands).
 *
 * Register operands are invalid and raise \#UD.  In 64-bit mode the default
 * operand size is 32-bit; only Intel CPUs honour a REX.W prefix here.  Loads
 * offset then selector (sel:off layout: offset first, selector at
 * offset-size displacement) and hands them to @a a_fnCImpl.
 *
 * @param a_bRm     The ModR/M byte.
 * @param a_fnCImpl The C implementation doing the actual far transfer
 *                  (e.g. iemCImpl_callf).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE /* no gates */, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
12743
12744
12745/**
12746 * Opcode 0xff /3.
12747 * @param bRm The RM byte.
12748 */
12749FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
12750{
12751 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
12752 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
12753}
12754
12755
12756/**
12757 * Opcode 0xff /4.
12758 * @param bRm The RM byte.
12759 */
12760FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
12761{
12762 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
12763 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12764
12765 if (IEM_IS_MODRM_REG_MODE(bRm))
12766 {
12767 /* The new RIP is taken from a register. */
12768 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12769 switch (pVCpu->iem.s.enmEffOpSize)
12770 {
12771 case IEMMODE_16BIT:
12772 IEM_MC_BEGIN(0, 1);
12773 IEM_MC_LOCAL(uint16_t, u16Target);
12774 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12775 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
12776 IEM_MC_END();
12777 break;
12778
12779 case IEMMODE_32BIT:
12780 IEM_MC_BEGIN(0, 1);
12781 IEM_MC_LOCAL(uint32_t, u32Target);
12782 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12783 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
12784 IEM_MC_END();
12785 break;
12786
12787 case IEMMODE_64BIT:
12788 IEM_MC_BEGIN(0, 1);
12789 IEM_MC_LOCAL(uint64_t, u64Target);
12790 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12791 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
12792 IEM_MC_END();
12793 break;
12794
12795 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12796 }
12797 }
12798 else
12799 {
12800 /* The new RIP is taken from a memory location. */
12801 switch (pVCpu->iem.s.enmEffOpSize)
12802 {
12803 case IEMMODE_16BIT:
12804 IEM_MC_BEGIN(0, 2);
12805 IEM_MC_LOCAL(uint16_t, u16Target);
12806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12807 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12809 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12810 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
12811 IEM_MC_END();
12812 break;
12813
12814 case IEMMODE_32BIT:
12815 IEM_MC_BEGIN(0, 2);
12816 IEM_MC_LOCAL(uint32_t, u32Target);
12817 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12819 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12820 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12821 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
12822 IEM_MC_END();
12823 break;
12824
12825 case IEMMODE_64BIT:
12826 IEM_MC_BEGIN(0, 2);
12827 IEM_MC_LOCAL(uint64_t, u64Target);
12828 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12829 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12830 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12831 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12832 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
12833 IEM_MC_END();
12834 break;
12835
12836 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12837 }
12838 }
12839}
12840
12841
12842/**
12843 * Opcode 0xff /5.
12844 * @param bRm The RM byte.
12845 */
12846FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
12847{
12848 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
12849 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
12850}
12851
12852
12853/**
12854 * Opcode 0xff /6.
12855 * @param bRm The RM byte.
12856 */
12857FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
12858{
12859 IEMOP_MNEMONIC(push_Ev, "push Ev");
12860
12861 /* Registers are handled by a common worker. */
12862 if (IEM_IS_MODRM_REG_MODE(bRm))
12863 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
12864
12865 /* Memory we do here. */
12866 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12867 switch (pVCpu->iem.s.enmEffOpSize)
12868 {
12869 case IEMMODE_16BIT:
12870 IEM_MC_BEGIN(0, 2);
12871 IEM_MC_LOCAL(uint16_t, u16Src);
12872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12875 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12876 IEM_MC_PUSH_U16(u16Src);
12877 IEM_MC_ADVANCE_RIP_AND_FINISH();
12878 IEM_MC_END();
12879 break;
12880
12881 case IEMMODE_32BIT:
12882 IEM_MC_BEGIN(0, 2);
12883 IEM_MC_LOCAL(uint32_t, u32Src);
12884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12887 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12888 IEM_MC_PUSH_U32(u32Src);
12889 IEM_MC_ADVANCE_RIP_AND_FINISH();
12890 IEM_MC_END();
12891 break;
12892
12893 case IEMMODE_64BIT:
12894 IEM_MC_BEGIN(0, 2);
12895 IEM_MC_LOCAL(uint64_t, u64Src);
12896 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12897 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12899 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12900 IEM_MC_PUSH_U64(u64Src);
12901 IEM_MC_ADVANCE_RIP_AND_FINISH();
12902 IEM_MC_END();
12903 break;
12904
12905 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12906 }
12907}
12908
12909
12910/**
12911 * @opcode 0xff
12912 */
12913FNIEMOP_DEF(iemOp_Grp5)
12914{
12915 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12916 switch (IEM_GET_MODRM_REG_8(bRm))
12917 {
12918 case 0:
12919 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
12920 case 1:
12921 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
12922 case 2:
12923 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
12924 case 3:
12925 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
12926 case 4:
12927 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
12928 case 5:
12929 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
12930 case 6:
12931 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
12932 case 7:
12933 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
12934 IEMOP_RAISE_INVALID_OPCODE_RET();
12935 }
12936 AssertFailedReturn(VERR_IEM_IPE_3);
12937}
12938
12939
12940
/**
 * One-byte opcode dispatch table, indexed by the opcode byte (0x00..0xff).
 *
 * Four entries per row; the row comment gives the opcode of the first entry.
 * Entry order is the x86 opcode encoding and must not be rearranged.
 * Declared extern at the top of this file so it can be forward referenced.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
13008
13009
13010/** @} */
13011
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette