VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 100148

Last change on this file since 100148 was 100148, checked in by vboxsync, 18 months ago

VMM/IEM: Made the python scripts pick up and deal with the IEM_MC_DEFER_TO_CIMPL_[0-5]_RET short-hand macros. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 445.7 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 100148 2023-06-10 19:44:02Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Note!  This macro deliberately ends inside the still-open lock-prefix
 *        'else' branch, so it MUST be closed by a following
 *        IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * @param   a_fnNormalU8    The non-locked byte worker (e.g. iemAImpl_add_u8).
 * @param   a_fRW           The IEM_ACCESS_XXX flags used when mapping the
 *                          memory destination (IEM_ACCESS_DATA_RW for
 *                          read-modify-write instructions).
 */
#define IEMOP_BODY_BINARY_rm_r8(a_fnNormalU8, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Closing half for IEMOP_BODY_BINARY_rm_r8 when the instruction does not
 * support the LOCK prefix: the lock-prefixed memory form raises the
 * invalid-lock-prefix exception.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
125
/**
 * Closing half for IEMOP_BODY_BINARY_rm_r8: emits the LOCK-prefixed memory
 * form, always mapping the destination read-write and calling the locked
 * (atomic) worker.
 *
 * @param   a_fnLockedU8    The locked byte worker (e.g. iemAImpl_add_u8_locked).
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
147
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * Since the destination is always a register, there is no LOCK variant; both
 * decode paths reject the lock prefix.
 *
 * @param   a_fnNormalU8    The byte worker (e.g. iemAImpl_add_u8).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
196
197
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Note!  Ends inside the still-open lock-prefix 'else' branch, so it MUST be
 *        closed by IEMOP_BODY_BINARY_rm_rv_NO_LOCK or
 *        IEMOP_BODY_BINARY_rm_rv_LOCKED.
 *
 * @param   a_fnNormalU16   The non-locked 16-bit worker.
 * @param   a_fnNormalU32   The non-locked 32-bit worker.
 * @param   a_fnNormalU64   The non-locked 64-bit worker.
 * @param   a_fRW           IEM_ACCESS_XXX flags for mapping the memory
 *                          destination.  Also used to detect TEST/CMP style
 *                          instructions (anything other than
 *                          IEM_ACCESS_DATA_RW) so the clearing of the high
 *                          dword of a 32-bit register destination is skipped.
 */
#define IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                if ((a_fRW) == IEM_ACCESS_DATA_RW) /* not TEST and CMP */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
340
/**
 * Closing half for IEMOP_BODY_BINARY_rm_rv when the instruction does not
 * support the LOCK prefix: the lock-prefixed memory form raises the
 * invalid-lock-prefix exception.
 */
#define IEMOP_BODY_BINARY_rm_rv_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
347
/**
 * Closing half for IEMOP_BODY_BINARY_rm_rv: emits the LOCK-prefixed memory
 * forms, always mapping the destination read-write and calling the locked
 * (atomic) workers.
 *
 * @param   a_fnLockedU16   The locked 16-bit worker.
 * @param   a_fnLockedU32   The locked 32-bit worker.
 * @param   a_fnLockedU64   The locked 64-bit worker.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
416
417
/**
 * Body for instructions like ADD, AND, OR, ++ working on AL with a byte
 * immediate.
 *
 * Note!  Deliberately ends without a semicolon after IEM_MC_END() so the
 *        invocation site supplies it.
 *
 * @param   a_fnNormalU8    The byte worker (e.g. iemAImpl_add_u8).
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_BEGIN(3, 0); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
437
/**
 * Body for instructions like ADD, AND, OR, ++ working on AX/EAX/RAX with a
 * word/dword immediate (sign-extended to 64 bits for the 64-bit operand size).
 *
 * Note!   The 16- and 32-bit cases end without 'break';
 *         NOTE(review): IEM_MC_ADVANCE_RIP_AND_FINISH presumably returns from
 *         the function so no fall-through occurs - confirm against the
 *         IEM_MC_* definitions.
 *
 * @param   a_fnNormalU16       The 16-bit worker.
 * @param   a_fnNormalU32       The 32-bit worker.
 * @param   a_fnNormalU64       The 64-bit worker.
 * @param   a_fModifiesDstReg   Non-zero when the instruction writes rAX
 *                              (i.e. not TEST/CMP); gates the clearing of the
 *                              high dword of RAX after a 32-bit operation.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            \
            IEM_MC_BEGIN(3, 0); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
504
505
506
507/* Instruction specification format - work in progress: */
508
/**
 * @opcode      0x00
 * @opmnemonic  add
 * @op1         rm:Eb
 * @op2         reg:Gb
 * @opmaps      one
 * @openc       ModR/M
 * @opflmodify  cf,pf,af,zf,sf,of
 * @ophints     harmless ignores_op_sizes
 * @opstats     add_Eb_Gb
 * @opgroup     og_gen_arith_bin
 * @optest              op1=1   op2=1   -> op1=2   efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf op1=1   op2=2   -> op1=3   efl&|=nc,po,na,nz,pl,nv
 * @optest              op1=254 op2=1   -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest              op1=128 op2=128 -> op1=0   efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The open/close body macro pair expands to the complete function body. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
}
531
532
/**
 * @opcode      0x01
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The open/close body macro pair expands to the complete function body. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
548
549
/**
 * @opcode      0x02
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination, so no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
561
562
/**
 * @opcode      0x03
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file; the last
       argument presumably flags that the destination register is written
       (cf. a_fModifiesDstReg in IEMOP_BODY_BINARY_rAX_Iz) - TODO confirm. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
574
575
/**
 * @opcode      0x04
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AL = AL + imm8. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
587
588
/**
 * @opcode      0x05
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nv,pl,nz,na,pe
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX = rAX + Iz (last arg: destination register is modified). */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
603
604
/**
 * @opcode      0x06
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
615
616
/**
 * @opcode      0x07
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid in 64-bit mode. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Loading a segment register can change the addressing mode, hence IEM_CIMPL_F_MODE. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
628
629
/**
 * @opcode      0x08
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest                  op1=7    op2=12   -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0    op2=0    -> op1=0    efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest                  op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    /* The open/close body macro pair expands to the complete function body. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
}
648
649
/**
 * @opcode      0x09
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf  op1=12   op2=7    -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0    op2=0    -> op1=0    efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=-2   op2=1    -> op1=-1   efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a             op2=0xa5a5             -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a         op2=0xa5a5a5a5         -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    /* The open/close body macro pair expands to the complete function body. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
670
671
/**
 * @opcode      0x0a
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    /* Register destination, so no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
686
687
/**
 * @opcode      0x0b
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    /* IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
702
703
/**
 * @opcode      0x0c
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    /* AL = AL | imm8. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
718
719
/**
 * @opcode      0x0d
 * @opgroup     og_gen_arith_bin
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opflundef   af
 * @opflclear   of,cf
 * @optest      efl|=of,cf  op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf  op1=0  op2=0 -> op1=0  efl&|=nc,po,na,zf,pl,nv
 * @optest                  op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a             op2=0xa5a5     -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a         op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    /* rAX = rAX | Iz (last arg: destination register is modified). */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
740
741
/**
 * @opcode      0x0e
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
752
753
/**
 * @opcode      0x0f
 * @opmnemonic  EscTwo0f
 * @openc       two0f
 * @opdisenum   OP_2B_ESC
 * @ophints     harmless
 * @opgroup     og_escapes
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */  }
    else
    {
        s_fTested = true;
        /* Spot-check the 0x0f 0xbc entries: BSF for all prefixes except F3h (TZCNT). */
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* Dispatch via the two-byte map; 4 entries per opcode, indexed by operand-size/repeat prefix. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify it.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_END_TB/*?*/,
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
796
/**
 * @opcode      0x10
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest      op1=1    op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=1    op2=1 efl|=cf  -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest      op1=0xff op2=0 efl|=cf  -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest      op1=0    op2=0 efl|=cf  -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=0    op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The open/close body macro pair expands to the complete function body. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
814
815
/**
 * @opcode      0x11
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @optest      op1=1  op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=1  op2=1 efl|=cf  -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest      op1=-1 op2=0 efl|=cf  -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest      op1=0  op2=0 efl|=cf  -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest      op1=0  op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The open/close body macro pair expands to the complete function body. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
833
834
/**
 * @opcode      0x12
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination, so no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
847
848
/**
 * @opcode      0x13
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
861
862
/**
 * @opcode      0x14
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AL = AL + imm8 + CF. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
875
876
/**
 * @opcode      0x15
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX = rAX + Iz + CF (last arg: destination register is modified). */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
889
890
/**
 * @opcode      0x16
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT(); /* Invalid in 64-bit mode. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
900
901
/**
 * @opcode      0x17
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* Note: DISOPTYPE_INHIBIT_IRQS - POP SS blocks interrupts for one instruction. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* Invalid in 64-bit mode. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
915
916
/**
 * @opcode      0x18
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* The open/close body macro pair expands to the complete function body. */
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
929
930
/**
 * @opcode      0x19
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* The open/close body macro pair expands to the complete function body. */
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
943
944
/**
 * @opcode      0x1a
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination, so no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
956
957
/**
 * @opcode      0x1b
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* IEMOP_BODY_BINARY_rv_rm is defined elsewhere in this file. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
969
970
/**
 * @opcode      0x1c
 * @opgroup     og_gen_arith_bin
 * @opfltest    cf
 * @opflmodify  cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AL = AL - imm8 - CF. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
982
983
984/**
985 * @opcode 0x1d
986 * @opgroup og_gen_arith_bin
987 * @opfltest cf
988 * @opflmodify cf,pf,af,zf,sf,of
989 */
990FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
991{
992 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
993 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
994}
995
996
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    /* The common segment-register push helper does the actual work. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1007
1008
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Deferred to the common segment-register pop worker (flagged IEM_CIMPL_F_MODE). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE, iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1020
1021
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after AND; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}


/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* 16/32/64-bit workers plus the LOCK-prefixed variants. */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}


/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Register destination only, so there is no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}


/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Register destination only; last argument (1) modifies the destination. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1082
1083
1084/**
1085 * @opcode 0x24
1086 * @opgroup og_gen_arith_bin
1087 * @opflmodify cf,pf,af,zf,sf,of
1088 * @opflundef af
1089 * @opflclear of,cf
1090 */
1091FNIEMOP_DEF(iemOp_and_Al_Ib)
1092{
1093 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1094 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1095 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1096}
1097
1098
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* rAX destination; operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1112
1113
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Record the ES segment override prefix, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1132
1133
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    /* Decimal adjust after addition is deferred to the C implementation. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_daa);
}
1148
1149
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Plain register/memory destination body, then the LOCK-prefixed variant. */
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}


/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* 16/32/64-bit workers plus the LOCK-prefixed variants. */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}


/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination only, so there is no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}


/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register destination only; last argument (1) modifies the destination. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}


/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL destination with an immediate byte operand. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}


/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX destination; operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1222
1223
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* Record the CS segment override prefix, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1242
1243
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    /* Decimal adjust after subtraction is deferred to the C implementation. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_das);
}
1258
1259
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* AF is architecturally undefined after XOR; tell the verifier to ignore it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8( iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}


/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* 16/32/64-bit workers plus the LOCK-prefixed variants. */
    IEMOP_BODY_BINARY_rm_rv( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}


/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Register destination only, so there is no LOCK variant. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}


/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Register destination only; last argument (1) modifies the destination. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}


/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Fixed AL destination with an immediate byte operand. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1335
1336
1337/**
1338 * @opcode 0x35
1339 * @opgroup og_gen_arith_bin
1340 * @opflmodify cf,pf,af,zf,sf,of
1341 * @opflundef af
1342 * @opflclear of,cf
1343 */
1344FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1345{
1346 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1347 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1348 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1349}
1350
1351
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* Record the SS segment override prefix, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1370
1371
1372/**
1373 * @opcode 0x37
1374 * @opfltest af,cf
1375 * @opflmodify cf,pf,af,zf,sf,of
1376 * @opflundef pf,zf,sf,of
1377 * @opgroup og_gen_arith_dec
1378 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1379 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1380 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1381 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1382 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1383 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1384 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1385 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1386 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1387 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1388 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1389 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1390 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1391 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1392 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1393 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1394 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1395 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1396 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1397 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1398 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1399 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1400 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1401 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1402 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1403 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1404 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1405 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1406 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1407 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1408 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1409 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    /* ASCII adjust after addition is deferred to the C implementation. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aaa);
}
1419
1420
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP only reads the destination (IEM_ACCESS_DATA_R), so LOCK is not allowed. */
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}


/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* Read-only destination, so no LOCK variant. */
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}


/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}


/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* Last argument (0): CMP does not write the destination register. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}


/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}


/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Last argument (0): CMP does not write rAX. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1481
1482
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* Record the DS segment override prefix, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1495
1496
1497/**
1498 * @opcode 0x3f
1499 * @opfltest af,cf
1500 * @opflmodify cf,pf,af,zf,sf,of
1501 * @opflundef pf,zf,sf,of
1502 * @opgroup og_gen_arith_dec
1503 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1504 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1505 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1506 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1507 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1508 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1509 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1510 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1511 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1512 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1513 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1514 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1515 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1516 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1517 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1518 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1519 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1520 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1521 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1522 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1523 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1524 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1525 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1526 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1527 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1528 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1529 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1530 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1531 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1532 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1533 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1534 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1535 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1536 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1537 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1538 */
1539FNIEMOP_DEF(iemOp_aas)
1540{
1541 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1542 IEMOP_HLP_NO_64BIT();
1543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1544 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1545
1546 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aas);
1547}
1548
1549
/**
 * Common 'inc/dec register' helper body.
 *
 * Not for 64-bit code, only for what became the rex prefixes; hence only the
 * 16-bit and 32-bit operand sizes are handled (no 64-bit case needed).
 *
 * @param a_fnNormalU16 16-bit assembly worker (dst pointer + eflags pointer).
 * @param a_fnNormalU32 32-bit assembly worker (dst pointer + eflags pointer).
 * @param a_iReg        The general register to increment/decrement.
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1584
/**
 * @opcode 0x40
 * REX prefix (no bits set) in 64-bit mode; 'inc eAX' elsewhere.
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Prefix consumed; decode the next opcode byte via the one-byte table. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}


/**
 * @opcode 0x41
 * REX.B prefix in 64-bit mode; 'inc eCX' elsewhere.
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}


/**
 * @opcode 0x42
 * REX.X prefix in 64-bit mode; 'inc eDX' elsewhere.
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}



/**
 * @opcode 0x43
 * REX.BX prefix in 64-bit mode; 'inc eBX' elsewhere.
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1676
1677
/**
 * @opcode 0x44
 * REX.R prefix in 64-bit mode; 'inc eSP' elsewhere.
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        /* Prefix consumed; decode the next opcode byte via the one-byte table. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}


/**
 * @opcode 0x45
 * REX.RB prefix in 64-bit mode; 'inc eBP' elsewhere.
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}


/**
 * @opcode 0x46
 * REX.RX prefix in 64-bit mode; 'inc eSI' elsewhere.
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}


/**
 * @opcode 0x47
 * REX.RBX prefix in 64-bit mode; 'inc eDI' elsewhere.
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1772
1773
/**
 * @opcode 0x48
 * REX.W prefix in 64-bit mode; 'dec eAX' elsewhere.
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        /* REX.W changes the effective operand size, so recalculate it. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}


/**
 * @opcode 0x49
 * REX.BW prefix in 64-bit mode; 'dec eCX' elsewhere.
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}


/**
 * @opcode 0x4a
 * REX.XW prefix in 64-bit mode; 'dec eDX' elsewhere.
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}


/**
 * @opcode 0x4b
 * REX.BXW prefix in 64-bit mode; 'dec eBX' elsewhere.
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
1868
1869
/**
 * @opcode 0x4c
 * REX.RW prefix in 64-bit mode; 'dec eSP' elsewhere.
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        /* REX.W changes the effective operand size, so recalculate it. */
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}


/**
 * @opcode 0x4d
 * REX.RBW prefix in 64-bit mode; 'dec eBP' elsewhere.
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}


/**
 * @opcode 0x4e
 * REX.RXW prefix in 64-bit mode; 'dec eSI' elsewhere.
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}


/**
 * @opcode 0x4f
 * REX.RBXW prefix in 64-bit mode; 'dec eDI' elsewhere.
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
1968
1969
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the REX.B bit extends the register index and the default
 * operand size is forced to 64-bit; the operand-size prefix selects 16-bit
 * (there is no 32-bit push in 64-bit mode).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2015
2016
/**
 * @opcode 0x50
 * PUSH rAX via the common register-push helper.
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}


/**
 * @opcode 0x51
 * PUSH rCX via the common register-push helper.
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}


/**
 * @opcode 0x52
 * PUSH rDX via the common register-push helper.
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}


/**
 * @opcode 0x53
 * PUSH rBX via the common register-push helper.
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2055
2056
/**
 * @opcode 0x54
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* The 8086/8088 decrements SP before storing it, so the value pushed
           is SP minus two; later CPUs push the original value (common worker
           below).  The MC block completes the instruction via
           IEM_MC_ADVANCE_RIP_AND_FINISH, so we don't fall through to the
           common worker on 8086 targets. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2075
2076
/**
 * @opcode 0x55
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Defer to the common push-register worker (handles REX.B & op sizes). */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2085
2086
/**
 * @opcode 0x56
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Defer to the common push-register worker (handles REX.B & op sizes). */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2095
2096
/**
 * @opcode 0x57
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Defer to the common push-register worker (handles REX.B & op sizes). */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2105
2106
/**
 * Common 'pop register' helper.
 *
 * Pops a value off the stack into the given general purpose register,
 * honouring the effective operand size.  In 64-bit mode the register index is
 * extended with REX.B, the default operand size is forced to 64-bit, and the
 * 66h prefix selects 16-bit (there is no 32-bit pop in long mode).
 *
 * @note    'pop SP/ESP/RSP' itself is special cased by iemOp_pop_eSP and does
 *          not go through this worker (in 64-bit mode iemOp_pop_eSP only
 *          dispatches here when REX.B is set, i.e. for r12).
 *
 * @param   iReg    The general purpose register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            /* 32-bit writes clear the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2153
2154
/**
 * @opcode 0x58
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Defer to the common pop-register worker (handles REX.B & op sizes). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2163
2164
/**
 * @opcode 0x59
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Defer to the common pop-register worker (handles REX.B & op sizes). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2173
2174
/**
 * @opcode 0x5a
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Defer to the common pop-register worker (handles REX.B & op sizes). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2183
2184
/**
 * @opcode 0x5b
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Defer to the common pop-register worker (handles REX.B & op sizes). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2193
2194
/**
 * @opcode 0x5c
 *
 * 'pop rSP' needs special handling: the value read from the stack is stored
 * into (R|E)SP itself, replacing the increment the pop would otherwise apply.
 * In 64-bit mode with REX.B set this is really 'pop r12' and the common
 * worker handles it.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        /* Same operand size fixups the common worker would have applied. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2244
2245
/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Defer to the common pop-register worker (handles REX.B & op sizes). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2254
2255
/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Defer to the common pop-register worker (handles REX.B & op sizes). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2264
2265
/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Defer to the common pop-register worker (handles REX.B & op sizes). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2274
2275
/**
 * @opcode 0x60
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();    /* Instruction requires an 80186 or later. */
    IEMOP_HLP_NO_64BIT();   /* Invalid in 64-bit mode. */
    /* Pushes all eight GPRs; deferred to C implementations per operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, iemCImpl_pusha_32);
}
2289
2290
/**
 * @opcode 0x61
 *
 * POPA in 16/32-bit code; in 64-bit mode this opcode is the MVEX prefix,
 * which is not supported and raises \#UD.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();    /* Instruction requires an 80186 or later. */
        IEMOP_HLP_NO_64BIT();
        /* Pops all eight GPRs; deferred to C implementations per operand size. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS, iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS, iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2310
2311
2312/**
2313 * @opcode 0x62
2314 * @opmnemonic bound
2315 * @op1 Gv_RO
2316 * @op2 Ma
2317 * @opmincpu 80186
2318 * @ophints harmless x86_invalid_64
2319 * @optest op1=0 op2=0 ->
2320 * @optest op1=1 op2=0 -> value.xcpt=5
2321 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2322 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2323 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2324 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2325 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2326 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2327 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2328 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2329 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2330 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2331 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2332 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2333 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2334 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2335 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2336 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2337 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2338 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2339 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2340 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2341 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2342 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2343 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2344 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2345 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2346 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2347 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2348 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2349 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2350 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2351 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2352 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2353 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2354 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2355 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2356 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2357 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2358 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2359 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2360 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2361 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2362 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2363 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* MOD != 3: this really is BOUND.  The bounds pair is read from
               memory and the index register checked against it by the C
               implementation (which raises #BR when out of bounds). */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix decoding: consume the remaining two payload bytes, but the
       actual instruction set is not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2451
2452
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw: adjusts the RPL field of the destination selector to be no
 * less privileged than the source's, updating ZF.  Protected mode only;
 * requires an 80286 or later.  The actual flag/selector logic lives in
 * iemAImpl_arpl. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: reference it directly and let the assembly
           helper update selector and EFLAGS in place. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read/write, run the helper, then commit
           both the selector word and the EFLAGS copy. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2500
2501
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source into the
             * 64-bit destination.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory (32-bit fetch,
             * sign-extended to 64 bits).
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* Without REX.W the instruction reportedly acts as a plain move (see
           @note above) -- not implemented yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2549
2550
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the FS segment override, then decode and dispatch the next
       opcode byte with the prefix in effect. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2568
2569
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the GS segment override, then decode and dispatch the next
       opcode byte with the prefix in effect. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2587
2588
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and recalculate the effective operand size, then
       decode and dispatch the next opcode byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present (they've already claimed idxPrefix). */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2613
2614
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and toggle the effective address size: 16<->32 in
       legacy modes, 64->32 in long mode (there is no 16-bit addressing in
       long mode).  Then decode and dispatch the next opcode byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2640
2641
/**
 * @opcode 0x68
 *
 * PUSH Iz - push an immediate sized by the effective operand size.  In
 * 64-bit mode the immediate is 32 bits, sign-extended to 64.
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2688
2689
/**
 * @opcode 0x69
 *
 * Three-operand IMUL: Gv = Ev * Iz (result truncated to the operand size).
 * SF, ZF, AF and PF are left undefined by the hardware, hence the
 * verification-mode declaration below; the EFLAGS worker is selected per
 * target CPU via IEMTARGETCPU_EFL_BEHAVIOR_SELECT.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - note the immediate follows the modrm bytes,
                   so effective address calc must leave room for it (2 bytes). */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - 4 byte immediate follows the modrm bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand - 32-bit immediate sign-extended to 64. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U32_SX_U64(u64Src, u32Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2855
2856
/**
 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate at the effective operand
 * size (the int8_t is implicitly sign-extended by the PUSH macros).
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2891
2892
/**
 * @opcode 0x6b
 *
 * Three-operand IMUL with byte immediate: Gv = Ev * Ib, the byte being
 * sign-extended to the effective operand size.  SF, ZF, AF and PF are left
 * undefined by the hardware; the EFLAGS worker is selected per target CPU.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand - byte immediate sign-extended via the
                   (int8_t) cast. */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand - 1 byte immediate follows the modrm bytes. */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the */
                IEM_MC_ASSIGN_U8_SX_U64(u64Src, u8Imm); /* parameter count for the threaded function for this block. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3058
3059
/**
 * @opcode 0x6c
 *
 * INS/REP INS byte variant - deferred to the C implementation selected by
 * the effective address size.  IEM_CIMPL_F_VMEXIT is set since I/O port
 * access may cause a VM exit.
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        /* NOTE(review): the trailing 'false' argument presumably means "I/O
           permission not yet checked" - confirm against iemCImpl_rep_ins_*. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3090
3091
/**
 * @opcode 0x6d
 *
 * INS/INSW/INSD - input word/dword from port DX into ES:[e/r]DI.  Dispatches
 * to a CIMPL helper selected by REP prefix, effective operand size and
 * effective address size.  64-bit operand size is folded onto the 32-bit
 * helpers (the case labels share the op32 handlers below).
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();                        /* Instruction introduced with the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK prefix => #UD. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit helpers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            /* Note: this expands to the *outer* switch's 'default:' label; it
               follows the last break and is only reached on a bogus op size. */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit helpers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* outer switch's default: label. */
        }
    }
}
3154
3155
/**
 * @opcode 0x6e
 *
 * OUTS/OUTSB - output byte from DS:[e/r]SI (segment overridable, hence the
 * iEffSeg argument) to port DX.  Work is deferred to a CIMPL helper chosen
 * by REP prefix and effective address size.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();                        /* Instruction introduced with the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK prefix => #UD. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        /* The effective source segment is forwarded; the trailing 'false' is
           passed through to the CIMPL helper (presumably an "I/O access
           already checked" flag — verify against the helper signature). */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3186
3187
/**
 * @opcode 0x6f
 *
 * OUTS/OUTSW/OUTSD - output word/dword from DS:[e/r]SI (segment overridable)
 * to port DX.  Dispatches to a CIMPL helper selected by REP prefix, effective
 * operand size and effective address size; 64-bit operand size is folded onto
 * the 32-bit helpers (shared case labels below).
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();                        /* Instruction introduced with the 80186. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();   /* LOCK prefix => #UD. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit helpers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT, iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            /* Outer switch's default: label (follows the final break). */
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit helpers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* outer switch's default: label. */
        }
    }
}
3250
3251
/**
 * @opcode 0x70
 *
 * JO rel8 - jump short if the overflow flag (OF) is set.  The signed 8-bit
 * displacement is fetched before prefix validation, matching the decode
 * order used by all Jcc handlers in this file.
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* In 64-bit mode Jcc defaults to 64-bit operand size; Intel CPUs ignore
       the operand-size prefix here (AMD would truncate RIP). */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken: RIP += i8Imm */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken: fall through */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3270
3271
/**
 * @opcode 0x71
 *
 * JNO rel8 - jump short if the overflow flag (OF) is clear.  Implemented by
 * testing OF and jumping in the ELSE branch (inverted condition).
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* OF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* OF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3290
/**
 * @opcode 0x72
 *
 * JC/JB/JNAE rel8 - jump short if the carry flag (CF) is set.
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* CF set: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3309
3310
/**
 * @opcode 0x73
 *
 * JNC/JNB/JAE rel8 - jump short if the carry flag (CF) is clear
 * (inverted test: jump in the ELSE branch).
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* CF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* CF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3329
3330
/**
 * @opcode 0x74
 *
 * JE/JZ rel8 - jump short if the zero flag (ZF) is set.
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* ZF set: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3349
3350
/**
 * @opcode 0x75
 *
 * JNE/JNZ rel8 - jump short if the zero flag (ZF) is clear
 * (inverted test: jump in the ELSE branch).
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* ZF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3369
3370
/**
 * @opcode 0x76
 *
 * JBE/JNA rel8 - jump short if below or equal: CF or ZF set.
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* CF=1 or ZF=1: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3389
3390
/**
 * @opcode 0x77
 *
 * JA/JNBE rel8 - jump short if above: both CF and ZF clear
 * (inverted test: jump in the ELSE branch).
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* CF or ZF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* CF=0 and ZF=0: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3409
3410
/**
 * @opcode 0x78
 *
 * JS rel8 - jump short if the sign flag (SF) is set.
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* SF set: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3429
3430
/**
 * @opcode 0x79
 *
 * JNS rel8 - jump short if the sign flag (SF) is clear
 * (inverted test: jump in the ELSE branch).
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* SF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* SF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3449
3450
/**
 * @opcode 0x7a
 *
 * JP/JPE rel8 - jump short if the parity flag (PF) is set.
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* PF set: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3469
3470
/**
 * @opcode 0x7b
 *
 * JNP/JPO rel8 - jump short if the parity flag (PF) is clear
 * (inverted test: jump in the ELSE branch).
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* PF set: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* PF clear: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3489
3490
/**
 * @opcode 0x7c
 *
 * JL/JNGE rel8 - jump short if less (signed): SF != OF.
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* SF != OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3509
3510
/**
 * @opcode 0x7d
 *
 * JNL/JGE rel8 - jump short if not less (signed): SF == OF
 * (inverted test: jump in the ELSE branch).
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* SF != OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* SF == OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3529
3530
/**
 * @opcode 0x7e
 *
 * JLE/JNG rel8 - jump short if less or equal (signed): ZF set or SF != OF.
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* ZF=1 or SF!=OF: taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3549
3550
/**
 * @opcode 0x7f
 *
 * JG/JNLE rel8 - jump short if greater (signed): ZF clear and SF == OF
 * (inverted test: jump in the ELSE branch).
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* ZF=1 or SF!=OF: not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* ZF=0 and SF==OF: taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3569
3570
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register-target path and the unlocked memory-target path, then
 * leaves an open else-clause: the caller MUST follow this macro with either
 * IEMOP_BODY_BINARY_Eb_Ib_LOCKED() or IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() to
 * close the braces and handle the LOCK-prefix case.
 *
 * @param a_fnNormalU8  Unlocked 8-bit worker, signature (pu8Dst, u8Src, pEFlags).
 * @param a_fRW         IEM_ACCESS_DATA_RW or IEM_ACCESS_DATA_R; controls how
 *                      the memory destination is mapped and committed.
 *
 * Note: in the memory path the immediate is fetched *after* the effective
 * address calculation, since the ModR/M displacement precedes the immediate
 * in the instruction stream.
 */
#define IEMOP_BODY_BINARY_Eb_Ib(a_fnNormalU8, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,              0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,     2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,  1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3620
/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib for instructions that never accept a LOCK
 * prefix (CMP): finishes decoding, then raises \#UD via the invalid-lock
 * helper.  Must directly follow IEMOP_BODY_BINARY_Eb_Ib().
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3627
/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib with the LOCK-prefixed memory path: same
 * decode sequence as the unlocked memory path, but maps the destination
 * read/write and calls the atomic worker.  Must directly follow
 * IEMOP_BODY_BINARY_Eb_Ib().
 *
 * @param a_fnLockedU8  Atomic 8-bit worker, signature (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,              0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,     2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,  1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3650
3651
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 *
 * ADD Eb,Ib - the two body macros expand to the full register/memory/locked
 * handling (the second closes the braces the first leaves open).
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
3662
3663
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 *
 * OR Eb,Ib - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
3674
3675
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 *
 * ADC Eb,Ib - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
3686
3687
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 *
 * SBB Eb,Ib - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
3698
3699
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 *
 * AND Eb,Ib - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
3710
3711
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 *
 * SUB Eb,Ib - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
3722
3723
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 *
 * XOR Eb,Ib - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
3734
3735
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 *
 * CMP Eb,Ib - destination is only read (IEM_ACCESS_DATA_R) and a LOCK prefix
 * is invalid, hence the NO_LOCK closer instead of a locked worker.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
3746
3747
/**
 * @opcode 0x80
 *
 * Group 1 dispatcher for Eb,Ib forms: the ModR/M reg field (3 bits) selects
 * which arithmetic/logical sub-handler runs.  All eight encodings are valid,
 * so the default case is unreachable.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3767
3768
/**
 * Body for a group 1 binary operator with a z-sized immediate (Ev,Iz).
 *
 * Emits the register-target paths (16/32/64-bit operand size) and the
 * unlocked memory-target paths, then leaves an open else-clause: the caller
 * MUST follow this macro with IEMOP_BODY_BINARY_Ev_Iz_LOCKED() or
 * IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() to close the braces.
 *
 * @param a_fnNormalU16  Unlocked 16-bit worker (pu16Dst, u16Src, pEFlags).
 * @param a_fnNormalU32  Unlocked 32-bit worker; on RW access the high half of
 *                       the 64-bit GPR is explicitly cleared afterwards.
 * @param a_fnNormalU64  Unlocked 64-bit worker; immediate is a sign-extended
 *                       32-bit value (Iz, not Iq).
 * @param a_fRW          IEM_ACCESS_DATA_RW or IEM_ACCESS_DATA_R.
 *
 * NOTE(review): the register paths are not uniform — 16/32-bit use
 * IEMOP_HLP_DONE_DECODING() while 64-bit uses the NO_LOCK_PREFIX variant,
 * and in the 64-bit memory path IEMOP_HLP_DONE_DECODING() precedes
 * IEM_MC_ASSIGN() whereas 16/32-bit do the opposite.  Kept byte-identical
 * here; confirm upstream whether this asymmetry is intentional.
 */
#define IEMOP_BODY_BINARY_Ev_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* Writes to 32-bit GPRs zero bits 63:32; skipped for read-only ops (CMP). */ \
                if (a_fRW == IEM_ACCESS_DATA_RW) \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,             0); \
                    IEM_MC_ARG(uint16_t,        u16Src,              1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,     2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,             0); \
                    IEM_MC_ARG(uint32_t,        u32Src,              1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,     2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,             0); \
                    IEM_MC_ARG(uint64_t,        u64Src,              1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,     2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
3919
/**
 * Closes IEMOP_BODY_BINARY_Ev_Iz for instructions that never accept a LOCK
 * prefix (CMP): finishes decoding, then raises \#UD.  Must directly follow
 * IEMOP_BODY_BINARY_Ev_Iz().
 */
#define IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
3926
/**
 * Closes IEMOP_BODY_BINARY_Ev_Iz with the LOCK-prefixed memory paths:
 * per-operand-size decode, read/write mapping, and a call to the atomic
 * worker.  Must directly follow IEMOP_BODY_BINARY_Ev_Iz().
 *
 * @param a_fnLockedU16  Atomic 16-bit worker (pu16Dst, u16Src, pEFlags).
 * @param a_fnLockedU32  Atomic 32-bit worker.
 * @param a_fnLockedU64  Atomic 64-bit worker; immediate is Iz (sign-extended
 *                       32-bit), matching the unlocked path.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,             0); \
                    IEM_MC_ARG(uint16_t,        u16Src,              1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,     2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,             0); \
                    IEM_MC_ARG(uint32_t,        u32Src,              1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,     2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,             0); \
                    IEM_MC_ARG(uint64_t,        u64Src,              1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,     2); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4004
4005
/**
 * @opmaps grp1_81
 * @opcode /0
 *
 * ADD Ev,Iz - the two body macros expand to the full register/memory/locked
 * handling (the second closes the braces the first leaves open).
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4016
4017
/**
 * @opmaps grp1_81
 * @opcode /1
 *
 * OR Ev,Iz - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4028
4029
/**
 * @opmaps grp1_81
 * @opcode /2
 *
 * ADC Ev,Iz - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4040
4041
/**
 * @opmaps grp1_81
 * @opcode /3
 *
 * SBB Ev,Iz - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4052
4053
/**
 * @opmaps grp1_81
 * @opcode /4
 *
 * AND Ev,Iz - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4064
4065
/**
 * @opmaps grp1_81
 * @opcode /5
 *
 * SUB Ev,Iz - register/memory/locked handling via the shared body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64,        IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4076
4077
4078/**
4079 * @opmaps grp1_81
4080 * @opcode /6
4081 */
4082FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4083{
4084 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4085 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
4086 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4087}
4088
4089
4090/**
4091 * @opmaps grp1_81
4092 * @opcode /7
4093 */
4094FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4095{
4096 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4097 IEMOP_BODY_BINARY_Ev_Iz( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
4098 IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK();
4099}
4100
4101
4102/**
4103 * @opcode 0x81
4104 */
4105FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4106{
4107 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4108 switch (IEM_GET_MODRM_REG_8(bRm))
4109 {
4110 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4111 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4112 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4113 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4114 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4115 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4116 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4117 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4118 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4119 }
4120}
4121
4122
4123/**
4124 * @opcode 0x82
4125 * @opmnemonic grp1_82
4126 * @opgroup og_groups
4127 */
4128FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4129{
4130 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4131 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4132}
4133
4134
4135/**
4136 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4137 * iemOp_Grp1_Ev_Ib.
4138 */
4139#define IEMOP_BODY_BINARY_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
4140 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4141 { \
4142 /* \
4143 * Register target \
4144 */ \
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4146 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4147 switch (pVCpu->iem.s.enmEffOpSize) \
4148 { \
4149 case IEMMODE_16BIT: \
4150 { \
4151 IEM_MC_BEGIN(3, 0); \
4152 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4153 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4154 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4155 \
4156 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4157 IEM_MC_REF_EFLAGS(pEFlags); \
4158 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4159 \
4160 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4161 IEM_MC_END(); \
4162 break; \
4163 } \
4164 \
4165 case IEMMODE_32BIT: \
4166 { \
4167 IEM_MC_BEGIN(3, 0); \
4168 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4169 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4170 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4171 \
4172 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4173 IEM_MC_REF_EFLAGS(pEFlags); \
4174 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4175 if ((a_fRW) != IEM_ACCESS_DATA_R) \
4176 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
4177 \
4178 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4179 IEM_MC_END(); \
4180 break; \
4181 } \
4182 \
4183 case IEMMODE_64BIT: \
4184 { \
4185 IEM_MC_BEGIN(3, 0); \
4186 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4187 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4188 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4189 \
4190 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4191 IEM_MC_REF_EFLAGS(pEFlags); \
4192 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4193 \
4194 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4195 IEM_MC_END(); \
4196 break; \
4197 } \
4198 \
4199 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4200 } \
4201 } \
4202 else \
4203 { \
4204 /* \
4205 * Memory target. \
4206 */ \
4207 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4208 { \
4209 switch (pVCpu->iem.s.enmEffOpSize) \
4210 { \
4211 case IEMMODE_16BIT: \
4212 { \
4213 IEM_MC_BEGIN(3, 2); \
4214 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4215 IEM_MC_ARG(uint16_t, u16Src, 1); \
4216 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4218 \
4219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4220 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4221 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
4222 IEMOP_HLP_DONE_DECODING(); \
4223 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4224 IEM_MC_FETCH_EFLAGS(EFlags); \
4225 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4226 \
4227 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
4228 IEM_MC_COMMIT_EFLAGS(EFlags); \
4229 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4230 IEM_MC_END(); \
4231 break; \
4232 } \
4233 \
4234 case IEMMODE_32BIT: \
4235 { \
4236 IEM_MC_BEGIN(3, 2); \
4237 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4238 IEM_MC_ARG(uint32_t, u32Src, 1); \
4239 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4241 \
4242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4243 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4244 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
4245 IEMOP_HLP_DONE_DECODING(); \
4246 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4247 IEM_MC_FETCH_EFLAGS(EFlags); \
4248 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4249 \
4250 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
4251 IEM_MC_COMMIT_EFLAGS(EFlags); \
4252 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4253 IEM_MC_END(); \
4254 break; \
4255 } \
4256 \
4257 case IEMMODE_64BIT: \
4258 { \
4259 IEM_MC_BEGIN(3, 2); \
4260 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4261 IEM_MC_ARG(uint64_t, u64Src, 1); \
4262 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4264 \
4265 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4266 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4267 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
4268 IEMOP_HLP_DONE_DECODING(); \
4269 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
4270 IEM_MC_FETCH_EFLAGS(EFlags); \
4271 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4272 \
4273 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
4274 IEM_MC_COMMIT_EFLAGS(EFlags); \
4275 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4276 IEM_MC_END(); \
4277 break; \
4278 } \
4279 \
4280 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4281 } \
4282 } \
4283 else \
4284 { \
4285 (void)0
4286
/**
 * Closes the scopes left open by IEMOP_BODY_BINARY_Ev_Ib for instructions
 * that do not take a LOCK prefix (CMP): a LOCK-prefixed memory form raises
 * an invalid-opcode style exception instead of executing.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4293
/**
 * Companion to IEMOP_BODY_BINARY_Ev_Ib: emits the LOCK-prefixed memory-target
 * path using the locked (atomic) workers and closes the scopes the first
 * macro leaves open.  The memory operand is always mapped read-write here
 * since a locked form necessarily writes the destination.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t, u16Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t, u32Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t, u64Src, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4371
4372/**
4373 * @opmaps grp1_83
4374 * @opcode /0
4375 */
4376FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
4377{
4378 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
4379 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
4380 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4381}
4382
4383
4384/**
4385 * @opmaps grp1_83
4386 * @opcode /1
4387 */
4388FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
4389{
4390 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
4391 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
4392 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4393}
4394
4395
4396/**
4397 * @opmaps grp1_83
4398 * @opcode /2
4399 */
4400FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
4401{
4402 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
4403 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
4404 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4405}
4406
4407
4408/**
4409 * @opmaps grp1_83
4410 * @opcode /3
4411 */
4412FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
4413{
4414 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
4415 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
4416 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4417}
4418
4419
4420/**
4421 * @opmaps grp1_83
4422 * @opcode /4
4423 */
4424FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
4425{
4426 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
4427 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
4428 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4429}
4430
4431
4432/**
4433 * @opmaps grp1_83
4434 * @opcode /5
4435 */
4436FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
4437{
4438 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
4439 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
4440 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4441}
4442
4443
4444/**
4445 * @opmaps grp1_83
4446 * @opcode /6
4447 */
4448FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
4449{
4450 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
4451 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
4452 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4453}
4454
4455
4456/**
4457 * @opmaps grp1_83
4458 * @opcode /7
4459 */
4460FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
4461{
4462 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
4463 IEMOP_BODY_BINARY_Ev_Ib( iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
4464 IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK();
4465}
4466
4467
4468/**
4469 * @opcode 0x83
4470 */
4471FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
4472{
4473 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
4474 to the 386 even if absent in the intel reference manuals and some
4475 3rd party opcode listings. */
4476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4477 switch (IEM_GET_MODRM_REG_8(bRm))
4478 {
4479 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
4480 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
4481 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
4482 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
4483 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
4484 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
4485 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
4486 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
4487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4488 }
4489}
4490
4491
4492/**
4493 * @opcode 0x84
4494 */
4495FNIEMOP_DEF(iemOp_test_Eb_Gb)
4496{
4497 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
4498 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4499 IEMOP_BODY_BINARY_rm_r8(iemAImpl_test_u8, IEM_ACCESS_DATA_R);
4500 IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
4501}
4502
4503
4504/**
4505 * @opcode 0x85
4506 */
4507FNIEMOP_DEF(iemOp_test_Ev_Gv)
4508{
4509 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
4510 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4511 IEMOP_BODY_BINARY_rm_rv(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, IEM_ACCESS_DATA_R);
4512 IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
4513}
4514
4515
4516/**
4517 * @opcode 0x86
4518 */
4519FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
4520{
4521 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4522 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
4523
4524 /*
4525 * If rm is denoting a register, no more instruction bytes.
4526 */
4527 if (IEM_IS_MODRM_REG_MODE(bRm))
4528 {
4529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4530
4531 IEM_MC_BEGIN(0, 2);
4532 IEM_MC_LOCAL(uint8_t, uTmp1);
4533 IEM_MC_LOCAL(uint8_t, uTmp2);
4534
4535 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4536 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4537 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4538 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4539
4540 IEM_MC_ADVANCE_RIP_AND_FINISH();
4541 IEM_MC_END();
4542 }
4543 else
4544 {
4545 /*
4546 * We're accessing memory.
4547 */
4548/** @todo the register must be committed separately! */
4549 IEM_MC_BEGIN(2, 2);
4550 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
4551 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
4552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4553
4554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4555 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4556 IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4557 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
4558 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
4559 else
4560 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
4561 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
4562
4563 IEM_MC_ADVANCE_RIP_AND_FINISH();
4564 IEM_MC_END();
4565 }
4566}
4567
4568
4569/**
4570 * @opcode 0x87
4571 */
4572FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
4573{
4574 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
4575 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4576
4577 /*
4578 * If rm is denoting a register, no more instruction bytes.
4579 */
4580 if (IEM_IS_MODRM_REG_MODE(bRm))
4581 {
4582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4583
4584 switch (pVCpu->iem.s.enmEffOpSize)
4585 {
4586 case IEMMODE_16BIT:
4587 IEM_MC_BEGIN(0, 2);
4588 IEM_MC_LOCAL(uint16_t, uTmp1);
4589 IEM_MC_LOCAL(uint16_t, uTmp2);
4590
4591 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4592 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4593 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4594 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4595
4596 IEM_MC_ADVANCE_RIP_AND_FINISH();
4597 IEM_MC_END();
4598 break;
4599
4600 case IEMMODE_32BIT:
4601 IEM_MC_BEGIN(0, 2);
4602 IEM_MC_LOCAL(uint32_t, uTmp1);
4603 IEM_MC_LOCAL(uint32_t, uTmp2);
4604
4605 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4606 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4607 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4608 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4609
4610 IEM_MC_ADVANCE_RIP_AND_FINISH();
4611 IEM_MC_END();
4612 break;
4613
4614 case IEMMODE_64BIT:
4615 IEM_MC_BEGIN(0, 2);
4616 IEM_MC_LOCAL(uint64_t, uTmp1);
4617 IEM_MC_LOCAL(uint64_t, uTmp2);
4618
4619 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
4620 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
4621 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
4622 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
4623
4624 IEM_MC_ADVANCE_RIP_AND_FINISH();
4625 IEM_MC_END();
4626 break;
4627
4628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4629 }
4630 }
4631 else
4632 {
4633 /*
4634 * We're accessing memory.
4635 */
4636 switch (pVCpu->iem.s.enmEffOpSize)
4637 {
4638/** @todo the register must be committed separately! */
4639 case IEMMODE_16BIT:
4640 IEM_MC_BEGIN(2, 2);
4641 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
4642 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
4643 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4644
4645 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4646 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4647 IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4648 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
4649 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
4650 else
4651 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
4652 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
4653
4654 IEM_MC_ADVANCE_RIP_AND_FINISH();
4655 IEM_MC_END();
4656 break;
4657
4658 case IEMMODE_32BIT:
4659 IEM_MC_BEGIN(2, 2);
4660 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
4661 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
4662 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4663
4664 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4665 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4666 IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4667 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
4668 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
4669 else
4670 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
4671 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
4672
4673 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
4674 IEM_MC_ADVANCE_RIP_AND_FINISH();
4675 IEM_MC_END();
4676 break;
4677
4678 case IEMMODE_64BIT:
4679 IEM_MC_BEGIN(2, 2);
4680 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
4681 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
4682 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4683
4684 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4685 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
4686 IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
4687 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
4688 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
4689 else
4690 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
4691 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
4692
4693 IEM_MC_ADVANCE_RIP_AND_FINISH();
4694 IEM_MC_END();
4695 break;
4696
4697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4698 }
4699 }
4700}
4701
4702
4703/**
4704 * @opcode 0x88
4705 */
4706FNIEMOP_DEF(iemOp_mov_Eb_Gb)
4707{
4708 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
4709
4710 uint8_t bRm;
4711 IEM_OPCODE_GET_NEXT_U8(&bRm);
4712
4713 /*
4714 * If rm is denoting a register, no more instruction bytes.
4715 */
4716 if (IEM_IS_MODRM_REG_MODE(bRm))
4717 {
4718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4719 IEM_MC_BEGIN(0, 1);
4720 IEM_MC_LOCAL(uint8_t, u8Value);
4721 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4722 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
4723 IEM_MC_ADVANCE_RIP_AND_FINISH();
4724 IEM_MC_END();
4725 }
4726 else
4727 {
4728 /*
4729 * We're writing a register to memory.
4730 */
4731 IEM_MC_BEGIN(0, 2);
4732 IEM_MC_LOCAL(uint8_t, u8Value);
4733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4736 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4737 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
4738 IEM_MC_ADVANCE_RIP_AND_FINISH();
4739 IEM_MC_END();
4740 }
4741}
4742
4743
4744/**
4745 * @opcode 0x89
4746 */
4747FNIEMOP_DEF(iemOp_mov_Ev_Gv)
4748{
4749 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
4750
4751 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4752
4753 /*
4754 * If rm is denoting a register, no more instruction bytes.
4755 */
4756 if (IEM_IS_MODRM_REG_MODE(bRm))
4757 {
4758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4759 switch (pVCpu->iem.s.enmEffOpSize)
4760 {
4761 case IEMMODE_16BIT:
4762 IEM_MC_BEGIN(0, 1);
4763 IEM_MC_LOCAL(uint16_t, u16Value);
4764 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4765 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
4766 IEM_MC_ADVANCE_RIP_AND_FINISH();
4767 IEM_MC_END();
4768 break;
4769
4770 case IEMMODE_32BIT:
4771 IEM_MC_BEGIN(0, 1);
4772 IEM_MC_LOCAL(uint32_t, u32Value);
4773 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4774 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
4775 IEM_MC_ADVANCE_RIP_AND_FINISH();
4776 IEM_MC_END();
4777 break;
4778
4779 case IEMMODE_64BIT:
4780 IEM_MC_BEGIN(0, 1);
4781 IEM_MC_LOCAL(uint64_t, u64Value);
4782 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4783 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
4784 IEM_MC_ADVANCE_RIP_AND_FINISH();
4785 IEM_MC_END();
4786 break;
4787
4788 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4789 }
4790 }
4791 else
4792 {
4793 /*
4794 * We're writing a register to memory.
4795 */
4796 switch (pVCpu->iem.s.enmEffOpSize)
4797 {
4798 case IEMMODE_16BIT:
4799 IEM_MC_BEGIN(0, 2);
4800 IEM_MC_LOCAL(uint16_t, u16Value);
4801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4804 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4805 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4806 IEM_MC_ADVANCE_RIP_AND_FINISH();
4807 IEM_MC_END();
4808 break;
4809
4810 case IEMMODE_32BIT:
4811 IEM_MC_BEGIN(0, 2);
4812 IEM_MC_LOCAL(uint32_t, u32Value);
4813 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4814 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4815 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4816 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4817 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
4818 IEM_MC_ADVANCE_RIP_AND_FINISH();
4819 IEM_MC_END();
4820 break;
4821
4822 case IEMMODE_64BIT:
4823 IEM_MC_BEGIN(0, 2);
4824 IEM_MC_LOCAL(uint64_t, u64Value);
4825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4828 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
4829 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
4830 IEM_MC_ADVANCE_RIP_AND_FINISH();
4831 IEM_MC_END();
4832 break;
4833
4834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4835 }
4836 }
4837}
4838
4839
4840/**
4841 * @opcode 0x8a
4842 */
4843FNIEMOP_DEF(iemOp_mov_Gb_Eb)
4844{
4845 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
4846
4847 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4848
4849 /*
4850 * If rm is denoting a register, no more instruction bytes.
4851 */
4852 if (IEM_IS_MODRM_REG_MODE(bRm))
4853 {
4854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4855 IEM_MC_BEGIN(0, 1);
4856 IEM_MC_LOCAL(uint8_t, u8Value);
4857 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4858 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
4859 IEM_MC_ADVANCE_RIP_AND_FINISH();
4860 IEM_MC_END();
4861 }
4862 else
4863 {
4864 /*
4865 * We're loading a register from memory.
4866 */
4867 IEM_MC_BEGIN(0, 2);
4868 IEM_MC_LOCAL(uint8_t, u8Value);
4869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4871 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4872 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4873 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
4874 IEM_MC_ADVANCE_RIP_AND_FINISH();
4875 IEM_MC_END();
4876 }
4877}
4878
4879
4880/**
4881 * @opcode 0x8b
4882 */
4883FNIEMOP_DEF(iemOp_mov_Gv_Ev)
4884{
4885 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
4886
4887 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4888
4889 /*
4890 * If rm is denoting a register, no more instruction bytes.
4891 */
4892 if (IEM_IS_MODRM_REG_MODE(bRm))
4893 {
4894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4895 switch (pVCpu->iem.s.enmEffOpSize)
4896 {
4897 case IEMMODE_16BIT:
4898 IEM_MC_BEGIN(0, 1);
4899 IEM_MC_LOCAL(uint16_t, u16Value);
4900 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4901 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4902 IEM_MC_ADVANCE_RIP_AND_FINISH();
4903 IEM_MC_END();
4904 break;
4905
4906 case IEMMODE_32BIT:
4907 IEM_MC_BEGIN(0, 1);
4908 IEM_MC_LOCAL(uint32_t, u32Value);
4909 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4910 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4911 IEM_MC_ADVANCE_RIP_AND_FINISH();
4912 IEM_MC_END();
4913 break;
4914
4915 case IEMMODE_64BIT:
4916 IEM_MC_BEGIN(0, 1);
4917 IEM_MC_LOCAL(uint64_t, u64Value);
4918 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
4919 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4920 IEM_MC_ADVANCE_RIP_AND_FINISH();
4921 IEM_MC_END();
4922 break;
4923
4924 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4925 }
4926 }
4927 else
4928 {
4929 /*
4930 * We're loading a register from memory.
4931 */
4932 switch (pVCpu->iem.s.enmEffOpSize)
4933 {
4934 case IEMMODE_16BIT:
4935 IEM_MC_BEGIN(0, 2);
4936 IEM_MC_LOCAL(uint16_t, u16Value);
4937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4940 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4941 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
4942 IEM_MC_ADVANCE_RIP_AND_FINISH();
4943 IEM_MC_END();
4944 break;
4945
4946 case IEMMODE_32BIT:
4947 IEM_MC_BEGIN(0, 2);
4948 IEM_MC_LOCAL(uint32_t, u32Value);
4949 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4950 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4952 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4953 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
4954 IEM_MC_ADVANCE_RIP_AND_FINISH();
4955 IEM_MC_END();
4956 break;
4957
4958 case IEMMODE_64BIT:
4959 IEM_MC_BEGIN(0, 2);
4960 IEM_MC_LOCAL(uint64_t, u64Value);
4961 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4962 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4963 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4964 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4965 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
4966 IEM_MC_ADVANCE_RIP_AND_FINISH();
4967 IEM_MC_END();
4968 break;
4969
4970 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4971 }
4972 }
4973}
4974
4975
4976/**
4977 * opcode 0x63
4978 * @todo Table fixme
4979 */
4980FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4981{
4982 if (!IEM_IS_64BIT_CODE(pVCpu))
4983 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4984 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4985 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4986 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4987}
4988
4989
4990/**
4991 * @opcode 0x8c
4992 */
4993FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4994{
4995 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4996
4997 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4998
4999 /*
5000 * Check that the destination register exists. The REX.R prefix is ignored.
5001 */
5002 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5003 if ( iSegReg > X86_SREG_GS)
5004 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5005
5006 /*
5007 * If rm is denoting a register, no more instruction bytes.
5008 * In that case, the operand size is respected and the upper bits are
5009 * cleared (starting with some pentium).
5010 */
5011 if (IEM_IS_MODRM_REG_MODE(bRm))
5012 {
5013 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5014 switch (pVCpu->iem.s.enmEffOpSize)
5015 {
5016 case IEMMODE_16BIT:
5017 IEM_MC_BEGIN(0, 1);
5018 IEM_MC_LOCAL(uint16_t, u16Value);
5019 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5020 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5021 IEM_MC_ADVANCE_RIP_AND_FINISH();
5022 IEM_MC_END();
5023 break;
5024
5025 case IEMMODE_32BIT:
5026 IEM_MC_BEGIN(0, 1);
5027 IEM_MC_LOCAL(uint32_t, u32Value);
5028 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5029 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5030 IEM_MC_ADVANCE_RIP_AND_FINISH();
5031 IEM_MC_END();
5032 break;
5033
5034 case IEMMODE_64BIT:
5035 IEM_MC_BEGIN(0, 1);
5036 IEM_MC_LOCAL(uint64_t, u64Value);
5037 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5038 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5039 IEM_MC_ADVANCE_RIP_AND_FINISH();
5040 IEM_MC_END();
5041 break;
5042
5043 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5044 }
5045 }
5046 else
5047 {
5048 /*
5049 * We're saving the register to memory. The access is word sized
5050 * regardless of operand size prefixes.
5051 */
5052#if 0 /* not necessary */
5053 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5054#endif
5055 IEM_MC_BEGIN(0, 2);
5056 IEM_MC_LOCAL(uint16_t, u16Value);
5057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5060 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5061 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5062 IEM_MC_ADVANCE_RIP_AND_FINISH();
5063 IEM_MC_END();
5064 }
5065}
5066
5067
5068
5069
5070/**
5071 * @opcode 0x8d
5072 */
5073FNIEMOP_DEF(iemOp_lea_Gv_M)
5074{
5075 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5077 if (IEM_IS_MODRM_REG_MODE(bRm))
5078 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5079
5080 switch (pVCpu->iem.s.enmEffOpSize)
5081 {
5082 case IEMMODE_16BIT:
5083 IEM_MC_BEGIN(0, 2);
5084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5085 IEM_MC_LOCAL(uint16_t, u16Cast);
5086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5088 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5089 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5090 IEM_MC_ADVANCE_RIP_AND_FINISH();
5091 IEM_MC_END();
5092 break;
5093
5094 case IEMMODE_32BIT:
5095 IEM_MC_BEGIN(0, 2);
5096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5097 IEM_MC_LOCAL(uint32_t, u32Cast);
5098 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5099 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5100 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5101 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5102 IEM_MC_ADVANCE_RIP_AND_FINISH();
5103 IEM_MC_END();
5104 break;
5105
5106 case IEMMODE_64BIT:
5107 IEM_MC_BEGIN(0, 1);
5108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5111 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5112 IEM_MC_ADVANCE_RIP_AND_FINISH();
5113 IEM_MC_END();
5114 break;
5115
5116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5117 }
5118}
5119
5120
5121/**
5122 * @opcode 0x8e
5123 */
5124FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5125{
5126 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5127
5128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5129
5130 /*
5131 * The practical operand size is 16-bit.
5132 */
5133#if 0 /* not necessary */
5134 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5135#endif
5136
5137 /*
5138 * Check that the destination register exists and can be used with this
5139 * instruction. The REX.R prefix is ignored.
5140 */
5141 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5142 if ( iSegReg == X86_SREG_CS
5143 || iSegReg > X86_SREG_GS)
5144 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5145
5146 /*
5147 * If rm is denoting a register, no more instruction bytes.
5148 */
5149 if (IEM_IS_MODRM_REG_MODE(bRm))
5150 {
5151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5152 IEM_MC_BEGIN(2, 0);
5153 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5154 IEM_MC_ARG(uint16_t, u16Value, 1);
5155 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5156 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5157 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5158 else
5159 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5160 IEM_MC_END();
5161 }
5162 else
5163 {
5164 /*
5165 * We're loading the register from memory. The access is word sized
5166 * regardless of operand size prefixes.
5167 */
5168 IEM_MC_BEGIN(2, 1);
5169 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
5170 IEM_MC_ARG(uint16_t, u16Value, 1);
5171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5174 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5175 if (iSRegArg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5176 IEM_MC_CALL_CIMPL_2( 0, iemCImpl_load_SReg, iSRegArg, u16Value);
5177 else
5178 IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_MODE, iemCImpl_load_SReg, iSRegArg, u16Value);
5179 IEM_MC_END();
5180 }
5181}
5182
5183
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    /* The second byte of the last parameter is the rSP displacement (2/4/8
       bytes, i.e. the operand size) applied during the EA calculation. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP copy so nothing is committed if the store faults. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new RSP only on full success. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
5277
5278
5279/**
5280 * @opcode 0x8f
5281 */
5282FNIEMOP_DEF(iemOp_Grp1A__xop)
5283{
5284 /*
5285 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5286 * three byte VEX prefix, except that the mmmmm field cannot have the values
5287 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5288 */
5289 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5290 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5291 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5292
5293 IEMOP_MNEMONIC(xop, "xop");
5294 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5295 {
5296 /** @todo Test when exctly the XOP conformance checks kick in during
5297 * instruction decoding and fetching (using \#PF). */
5298 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5299 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5300 if ( ( pVCpu->iem.s.fPrefixes
5301 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5302 == 0)
5303 {
5304 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5305 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
5306 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5307 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5308 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5309 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
5310 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
5311 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
5312 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
5313
5314 /** @todo XOP: Just use new tables and decoders. */
5315 switch (bRm & 0x1f)
5316 {
5317 case 8: /* xop opcode map 8. */
5318 IEMOP_BITCH_ABOUT_STUB();
5319 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5320
5321 case 9: /* xop opcode map 9. */
5322 IEMOP_BITCH_ABOUT_STUB();
5323 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5324
5325 case 10: /* xop opcode map 10. */
5326 IEMOP_BITCH_ABOUT_STUB();
5327 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5328
5329 default:
5330 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5331 IEMOP_RAISE_INVALID_OPCODE_RET();
5332 }
5333 }
5334 else
5335 Log(("XOP: Invalid prefix mix!\n"));
5336 }
5337 else
5338 Log(("XOP: XOP support disabled!\n"));
5339 IEMOP_RAISE_INVALID_OPCODE_RET();
5340}
5341
5342
5343/**
5344 * Common 'xchg reg,rAX' helper.
5345 */
5346FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
5347{
5348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5349
5350 iReg |= pVCpu->iem.s.uRexB;
5351 switch (pVCpu->iem.s.enmEffOpSize)
5352 {
5353 case IEMMODE_16BIT:
5354 IEM_MC_BEGIN(0, 2);
5355 IEM_MC_LOCAL(uint16_t, u16Tmp1);
5356 IEM_MC_LOCAL(uint16_t, u16Tmp2);
5357 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
5358 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
5359 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
5360 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
5361 IEM_MC_ADVANCE_RIP_AND_FINISH();
5362 IEM_MC_END();
5363 break;
5364
5365 case IEMMODE_32BIT:
5366 IEM_MC_BEGIN(0, 2);
5367 IEM_MC_LOCAL(uint32_t, u32Tmp1);
5368 IEM_MC_LOCAL(uint32_t, u32Tmp2);
5369 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
5370 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
5371 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
5372 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
5373 IEM_MC_ADVANCE_RIP_AND_FINISH();
5374 IEM_MC_END();
5375 break;
5376
5377 case IEMMODE_64BIT:
5378 IEM_MC_BEGIN(0, 2);
5379 IEM_MC_LOCAL(uint64_t, u64Tmp1);
5380 IEM_MC_LOCAL(uint64_t, u64Tmp2);
5381 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
5382 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
5383 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
5384 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
5385 IEM_MC_ADVANCE_RIP_AND_FINISH();
5386 IEM_MC_END();
5387 break;
5388
5389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5390 }
5391}
5392
5393
5394/**
5395 * @opcode 0x90
5396 */
5397FNIEMOP_DEF(iemOp_nop)
5398{
5399 /* R8/R8D and RAX/EAX can be exchanged. */
5400 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
5401 {
5402 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
5403 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
5404 }
5405
5406 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
5407 {
5408 IEMOP_MNEMONIC(pause, "pause");
5409 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
5410 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
5411 if (!IEM_IS_IN_GUEST(pVCpu))
5412 { /* probable */ }
5413#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5414 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
5415 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_vmx_pause);
5416#endif
5417#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
5418 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
5419 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_svm_pause);
5420#endif
5421 }
5422 else
5423 IEMOP_MNEMONIC(nop, "nop");
5424 IEM_MC_BEGIN(0, 0);
5425 IEM_MC_ADVANCE_RIP_AND_FINISH();
5426 IEM_MC_END();
5427}
5428
5429
5430/**
5431 * @opcode 0x91
5432 */
5433FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
5434{
5435 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
5436 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
5437}
5438
5439
5440/**
5441 * @opcode 0x92
5442 */
5443FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
5444{
5445 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
5446 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
5447}
5448
5449
5450/**
5451 * @opcode 0x93
5452 */
5453FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
5454{
5455 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
5456 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
5457}
5458
5459
5460/**
5461 * @opcode 0x94
5462 */
5463FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
5464{
5465 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
5466 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
5467}
5468
5469
5470/**
5471 * @opcode 0x95
5472 */
5473FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
5474{
5475 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
5476 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
5477}
5478
5479
5480/**
5481 * @opcode 0x96
5482 */
5483FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
5484{
5485 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
5486 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
5487}
5488
5489
5490/**
5491 * @opcode 0x97
5492 */
5493FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
5494{
5495 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
5496 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
5497}
5498
5499
5500/**
5501 * @opcode 0x98
5502 */
5503FNIEMOP_DEF(iemOp_cbw)
5504{
5505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5506 switch (pVCpu->iem.s.enmEffOpSize)
5507 {
5508 case IEMMODE_16BIT:
5509 IEMOP_MNEMONIC(cbw, "cbw");
5510 IEM_MC_BEGIN(0, 1);
5511 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
5512 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
5513 } IEM_MC_ELSE() {
5514 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
5515 } IEM_MC_ENDIF();
5516 IEM_MC_ADVANCE_RIP_AND_FINISH();
5517 IEM_MC_END();
5518 break;
5519
5520 case IEMMODE_32BIT:
5521 IEMOP_MNEMONIC(cwde, "cwde");
5522 IEM_MC_BEGIN(0, 1);
5523 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5524 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
5525 } IEM_MC_ELSE() {
5526 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
5527 } IEM_MC_ENDIF();
5528 IEM_MC_ADVANCE_RIP_AND_FINISH();
5529 IEM_MC_END();
5530 break;
5531
5532 case IEMMODE_64BIT:
5533 IEMOP_MNEMONIC(cdqe, "cdqe");
5534 IEM_MC_BEGIN(0, 1);
5535 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5536 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
5537 } IEM_MC_ELSE() {
5538 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
5539 } IEM_MC_ENDIF();
5540 IEM_MC_ADVANCE_RIP_AND_FINISH();
5541 IEM_MC_END();
5542 break;
5543
5544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5545 }
5546}
5547
5548
5549/**
5550 * @opcode 0x99
5551 */
5552FNIEMOP_DEF(iemOp_cwd)
5553{
5554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5555 switch (pVCpu->iem.s.enmEffOpSize)
5556 {
5557 case IEMMODE_16BIT:
5558 IEMOP_MNEMONIC(cwd, "cwd");
5559 IEM_MC_BEGIN(0, 1);
5560 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5561 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
5562 } IEM_MC_ELSE() {
5563 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
5564 } IEM_MC_ENDIF();
5565 IEM_MC_ADVANCE_RIP_AND_FINISH();
5566 IEM_MC_END();
5567 break;
5568
5569 case IEMMODE_32BIT:
5570 IEMOP_MNEMONIC(cdq, "cdq");
5571 IEM_MC_BEGIN(0, 1);
5572 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5573 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
5574 } IEM_MC_ELSE() {
5575 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
5576 } IEM_MC_ENDIF();
5577 IEM_MC_ADVANCE_RIP_AND_FINISH();
5578 IEM_MC_END();
5579 break;
5580
5581 case IEMMODE_64BIT:
5582 IEMOP_MNEMONIC(cqo, "cqo");
5583 IEM_MC_BEGIN(0, 1);
5584 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
5585 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
5586 } IEM_MC_ELSE() {
5587 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
5588 } IEM_MC_ENDIF();
5589 IEM_MC_ADVANCE_RIP_AND_FINISH();
5590 IEM_MC_END();
5591 break;
5592
5593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5594 }
5595}
5596
5597
5598/**
5599 * @opcode 0x9a
5600 */
5601FNIEMOP_DEF(iemOp_call_Ap)
5602{
5603 IEMOP_MNEMONIC(call_Ap, "call Ap");
5604 IEMOP_HLP_NO_64BIT();
5605
5606 /* Decode the far pointer address and pass it on to the far call C implementation. */
5607 uint32_t off32Seg;
5608 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
5609 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
5610 else
5611 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
5612 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
5613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5614 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
5615 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
5616}
5617
5618
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - checks for pending x87 exceptions (and CR0.MP/TS conditions),
 * otherwise a no-op that just advances RIP. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
5631
5632
5633/**
5634 * @opcode 0x9c
5635 */
5636FNIEMOP_DEF(iemOp_pushf_Fv)
5637{
5638 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5641 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
5642}
5643
5644
5645/**
5646 * @opcode 0x9d
5647 */
5648FNIEMOP_DEF(iemOp_popf_Fv)
5649{
5650 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
5651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5652 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5653 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
5654}
5655
5656
5657/**
5658 * @opcode 0x9e
5659 */
5660FNIEMOP_DEF(iemOp_sahf)
5661{
5662 IEMOP_MNEMONIC(sahf, "sahf");
5663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5664 if ( IEM_IS_64BIT_CODE(pVCpu)
5665 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5666 IEMOP_RAISE_INVALID_OPCODE_RET();
5667 IEM_MC_BEGIN(0, 2);
5668 IEM_MC_LOCAL(uint32_t, u32Flags);
5669 IEM_MC_LOCAL(uint32_t, EFlags);
5670 IEM_MC_FETCH_EFLAGS(EFlags);
5671 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
5672 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5673 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
5674 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
5675 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
5676 IEM_MC_COMMIT_EFLAGS(EFlags);
5677 IEM_MC_ADVANCE_RIP_AND_FINISH();
5678 IEM_MC_END();
5679}
5680
5681
5682/**
5683 * @opcode 0x9f
5684 */
5685FNIEMOP_DEF(iemOp_lahf)
5686{
5687 IEMOP_MNEMONIC(lahf, "lahf");
5688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5689 if ( IEM_IS_64BIT_CODE(pVCpu)
5690 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5691 IEMOP_RAISE_INVALID_OPCODE_RET();
5692 IEM_MC_BEGIN(0, 1);
5693 IEM_MC_LOCAL(uint8_t, u8Flags);
5694 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
5695 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
5696 IEM_MC_ADVANCE_RIP_AND_FINISH();
5697 IEM_MC_END();
5698}
5699
5700
5701/**
5702 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
5703 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
5704 * prefixes. Will return on failures.
5705 * @param a_GCPtrMemOff The variable to store the offset in.
5706 */
5707#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
5708 do \
5709 { \
5710 switch (pVCpu->iem.s.enmEffAddrMode) \
5711 { \
5712 case IEMMODE_16BIT: \
5713 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
5714 break; \
5715 case IEMMODE_32BIT: \
5716 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
5717 break; \
5718 case IEMMODE_64BIT: \
5719 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
5720 break; \
5721 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5722 } \
5723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5724 } while (0)
5725
5726/**
5727 * @opcode 0xa0
5728 */
5729FNIEMOP_DEF(iemOp_mov_AL_Ob)
5730{
5731 /*
5732 * Get the offset and fend off lock prefixes.
5733 */
5734 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
5735 RTGCPTR GCPtrMemOff;
5736 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5737
5738 /*
5739 * Fetch AL.
5740 */
5741 IEM_MC_BEGIN(0,1);
5742 IEM_MC_LOCAL(uint8_t, u8Tmp);
5743 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5744 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
5745 IEM_MC_ADVANCE_RIP_AND_FINISH();
5746 IEM_MC_END();
5747}
5748
5749
5750/**
5751 * @opcode 0xa1
5752 */
5753FNIEMOP_DEF(iemOp_mov_rAX_Ov)
5754{
5755 /*
5756 * Get the offset and fend off lock prefixes.
5757 */
5758 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
5759 RTGCPTR GCPtrMemOff;
5760 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5761
5762 /*
5763 * Fetch rAX.
5764 */
5765 switch (pVCpu->iem.s.enmEffOpSize)
5766 {
5767 case IEMMODE_16BIT:
5768 IEM_MC_BEGIN(0,1);
5769 IEM_MC_LOCAL(uint16_t, u16Tmp);
5770 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5771 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
5772 IEM_MC_ADVANCE_RIP_AND_FINISH();
5773 IEM_MC_END();
5774 break;
5775
5776 case IEMMODE_32BIT:
5777 IEM_MC_BEGIN(0,1);
5778 IEM_MC_LOCAL(uint32_t, u32Tmp);
5779 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5780 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
5781 IEM_MC_ADVANCE_RIP_AND_FINISH();
5782 IEM_MC_END();
5783 break;
5784
5785 case IEMMODE_64BIT:
5786 IEM_MC_BEGIN(0,1);
5787 IEM_MC_LOCAL(uint64_t, u64Tmp);
5788 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5789 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
5790 IEM_MC_ADVANCE_RIP_AND_FINISH();
5791 IEM_MC_END();
5792 break;
5793
5794 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5795 }
5796}
5797
5798
5799/**
5800 * @opcode 0xa2
5801 */
5802FNIEMOP_DEF(iemOp_mov_Ob_AL)
5803{
5804 /*
5805 * Get the offset and fend off lock prefixes.
5806 */
5807 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
5808 RTGCPTR GCPtrMemOff;
5809 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5810
5811 /*
5812 * Store AL.
5813 */
5814 IEM_MC_BEGIN(0,1);
5815 IEM_MC_LOCAL(uint8_t, u8Tmp);
5816 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
5817 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
5818 IEM_MC_ADVANCE_RIP_AND_FINISH();
5819 IEM_MC_END();
5820}
5821
5822
5823/**
5824 * @opcode 0xa3
5825 */
5826FNIEMOP_DEF(iemOp_mov_Ov_rAX)
5827{
5828 /*
5829 * Get the offset and fend off lock prefixes.
5830 */
5831 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
5832 RTGCPTR GCPtrMemOff;
5833 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5834
5835 /*
5836 * Store rAX.
5837 */
5838 switch (pVCpu->iem.s.enmEffOpSize)
5839 {
5840 case IEMMODE_16BIT:
5841 IEM_MC_BEGIN(0,1);
5842 IEM_MC_LOCAL(uint16_t, u16Tmp);
5843 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
5844 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
5845 IEM_MC_ADVANCE_RIP_AND_FINISH();
5846 IEM_MC_END();
5847 break;
5848
5849 case IEMMODE_32BIT:
5850 IEM_MC_BEGIN(0,1);
5851 IEM_MC_LOCAL(uint32_t, u32Tmp);
5852 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
5853 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
5854 IEM_MC_ADVANCE_RIP_AND_FINISH();
5855 IEM_MC_END();
5856 break;
5857
5858 case IEMMODE_64BIT:
5859 IEM_MC_BEGIN(0,1);
5860 IEM_MC_LOCAL(uint64_t, u64Tmp);
5861 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
5862 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
5863 IEM_MC_ADVANCE_RIP_AND_FINISH();
5864 IEM_MC_END();
5865 break;
5866
5867 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5868 }
5869}
5870
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits a single (non-rep) MOVS iteration: load from DS(or override):rSI,
 * store to ES:rDI, then advance or retreat both index registers by the
 * element size according to EFLAGS.DF.  The source addresses are zero
 * extended to 64 bits from the given address width. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
5889
5890/**
5891 * @opcode 0xa4
5892 */
5893FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
5894{
5895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5896
5897 /*
5898 * Use the C implementation if a repeat prefix is encountered.
5899 */
5900 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5901 {
5902 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
5903 switch (pVCpu->iem.s.enmEffAddrMode)
5904 {
5905 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
5906 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
5907 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
5908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5909 }
5910 }
5911 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
5912
5913 /*
5914 * Sharing case implementation with movs[wdq] below.
5915 */
5916 switch (pVCpu->iem.s.enmEffAddrMode)
5917 {
5918 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
5919 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
5920 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
5921 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5922 }
5923}
5924
5925
5926/**
5927 * @opcode 0xa5
5928 */
5929FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5930{
5931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5932
5933 /*
5934 * Use the C implementation if a repeat prefix is encountered.
5935 */
5936 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5937 {
5938 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5939 switch (pVCpu->iem.s.enmEffOpSize)
5940 {
5941 case IEMMODE_16BIT:
5942 switch (pVCpu->iem.s.enmEffAddrMode)
5943 {
5944 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5945 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5946 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5947 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5948 }
5949 break;
5950 case IEMMODE_32BIT:
5951 switch (pVCpu->iem.s.enmEffAddrMode)
5952 {
5953 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5954 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5955 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5956 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5957 }
5958 case IEMMODE_64BIT:
5959 switch (pVCpu->iem.s.enmEffAddrMode)
5960 {
5961 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5962 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5963 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5964 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5965 }
5966 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5967 }
5968 }
5969 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5970
5971 /*
5972 * Annoying double switch here.
5973 * Using ugly macro for implementing the cases, sharing it with movsb.
5974 */
5975 switch (pVCpu->iem.s.enmEffOpSize)
5976 {
5977 case IEMMODE_16BIT:
5978 switch (pVCpu->iem.s.enmEffAddrMode)
5979 {
5980 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5981 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5982 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5984 }
5985 break;
5986
5987 case IEMMODE_32BIT:
5988 switch (pVCpu->iem.s.enmEffAddrMode)
5989 {
5990 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5991 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5992 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5993 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5994 }
5995 break;
5996
5997 case IEMMODE_64BIT:
5998 switch (pVCpu->iem.s.enmEffAddrMode)
5999 {
6000 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6001 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
6002 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
6003 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6004 }
6005 break;
6006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6007 }
6008}
6009
6010#undef IEM_MOVS_CASE
6011
6012/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
6013#define IEM_CMPS_CASE(ValBits, AddrBits) \
6014 IEM_MC_BEGIN(3, 3); \
6015 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
6016 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
6017 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
6018 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
6019 IEM_MC_LOCAL(RTGCPTR, uAddr); \
6020 \
6021 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
6022 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
6023 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
6024 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
6025 IEM_MC_REF_LOCAL(puValue1, uValue1); \
6026 IEM_MC_REF_EFLAGS(pEFlags); \
6027 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
6028 \
6029 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
6030 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6031 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
6032 } IEM_MC_ELSE() { \
6033 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
6034 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
6035 } IEM_MC_ENDIF(); \
6036 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
6037 IEM_MC_END() \
6038
6039/**
6040 * @opcode 0xa6
6041 */
6042FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6043{
6044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6045
6046 /*
6047 * Use the C implementation if a repeat prefix is encountered.
6048 */
6049 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6050 {
6051 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6052 switch (pVCpu->iem.s.enmEffAddrMode)
6053 {
6054 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6055 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6056 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6057 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6058 }
6059 }
6060 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6061 {
6062 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6063 switch (pVCpu->iem.s.enmEffAddrMode)
6064 {
6065 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6066 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6067 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6069 }
6070 }
6071 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6072
6073 /*
6074 * Sharing case implementation with cmps[wdq] below.
6075 */
6076 switch (pVCpu->iem.s.enmEffAddrMode)
6077 {
6078 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
6079 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
6080 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
6081 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6082 }
6083}
6084
6085
6086/**
6087 * @opcode 0xa7
6088 */
6089FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6090{
6091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6092
6093 /*
6094 * Use the C implementation if a repeat prefix is encountered.
6095 */
6096 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6097 {
6098 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6099 switch (pVCpu->iem.s.enmEffOpSize)
6100 {
6101 case IEMMODE_16BIT:
6102 switch (pVCpu->iem.s.enmEffAddrMode)
6103 {
6104 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6105 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6106 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6107 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6108 }
6109 break;
6110 case IEMMODE_32BIT:
6111 switch (pVCpu->iem.s.enmEffAddrMode)
6112 {
6113 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6114 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6115 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6117 }
6118 case IEMMODE_64BIT:
6119 switch (pVCpu->iem.s.enmEffAddrMode)
6120 {
6121 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6122 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6123 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6125 }
6126 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6127 }
6128 }
6129
6130 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6131 {
6132 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6133 switch (pVCpu->iem.s.enmEffOpSize)
6134 {
6135 case IEMMODE_16BIT:
6136 switch (pVCpu->iem.s.enmEffAddrMode)
6137 {
6138 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6139 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6140 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6141 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6142 }
6143 break;
6144 case IEMMODE_32BIT:
6145 switch (pVCpu->iem.s.enmEffAddrMode)
6146 {
6147 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6148 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6149 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6150 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6151 }
6152 case IEMMODE_64BIT:
6153 switch (pVCpu->iem.s.enmEffAddrMode)
6154 {
6155 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6156 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6157 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6159 }
6160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6161 }
6162 }
6163
6164 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6165
6166 /*
6167 * Annoying double switch here.
6168 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6169 */
6170 switch (pVCpu->iem.s.enmEffOpSize)
6171 {
6172 case IEMMODE_16BIT:
6173 switch (pVCpu->iem.s.enmEffAddrMode)
6174 {
6175 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
6176 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
6177 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
6178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6179 }
6180 break;
6181
6182 case IEMMODE_32BIT:
6183 switch (pVCpu->iem.s.enmEffAddrMode)
6184 {
6185 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
6186 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
6187 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
6188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6189 }
6190 break;
6191
6192 case IEMMODE_64BIT:
6193 switch (pVCpu->iem.s.enmEffAddrMode)
6194 {
6195 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6196 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
6197 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
6198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6199 }
6200 break;
6201 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6202 }
6203}
6204
6205#undef IEM_CMPS_CASE
6206
6207/**
6208 * @opcode 0xa8
6209 */
6210FNIEMOP_DEF(iemOp_test_AL_Ib)
6211{
6212 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6213 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6214 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6215}
6216
6217
6218/**
6219 * @opcode 0xa9
6220 */
6221FNIEMOP_DEF(iemOp_test_eAX_Iz)
6222{
6223 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6224 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6225 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6226}
6227
6228
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one non-REP STOS iteration: fetch the
 * ValBits-sized value from rAX, store it at ES:[rDI] (rDI zero-extended from
 * AddrBits), then step rDI by the operand size - backwards when EFLAGS.DF is
 * set, forwards otherwise. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6244
6245/**
6246 * @opcode 0xaa
6247 */
6248FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6249{
6250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6251
6252 /*
6253 * Use the C implementation if a repeat prefix is encountered.
6254 */
6255 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6256 {
6257 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6258 switch (pVCpu->iem.s.enmEffAddrMode)
6259 {
6260 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m16);
6261 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m32);
6262 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP, iemCImpl_stos_al_m64);
6263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6264 }
6265 }
6266 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6267
6268 /*
6269 * Sharing case implementation with stos[wdq] below.
6270 */
6271 switch (pVCpu->iem.s.enmEffAddrMode)
6272 {
6273 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
6274 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
6275 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
6276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6277 }
6278}
6279
6280
6281/**
6282 * @opcode 0xab
6283 */
6284FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6285{
6286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6287
6288 /*
6289 * Use the C implementation if a repeat prefix is encountered.
6290 */
6291 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6292 {
6293 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6294 switch (pVCpu->iem.s.enmEffOpSize)
6295 {
6296 case IEMMODE_16BIT:
6297 switch (pVCpu->iem.s.enmEffAddrMode)
6298 {
6299 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m16);
6300 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m32);
6301 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_ax_m64);
6302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6303 }
6304 break;
6305 case IEMMODE_32BIT:
6306 switch (pVCpu->iem.s.enmEffAddrMode)
6307 {
6308 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m16);
6309 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m32);
6310 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_eax_m64);
6311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6312 }
6313 case IEMMODE_64BIT:
6314 switch (pVCpu->iem.s.enmEffAddrMode)
6315 {
6316 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6317 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m32);
6318 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP, iemCImpl_stos_rax_m64);
6319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6320 }
6321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6322 }
6323 }
6324 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
6325
6326 /*
6327 * Annoying double switch here.
6328 * Using ugly macro for implementing the cases, sharing it with stosb.
6329 */
6330 switch (pVCpu->iem.s.enmEffOpSize)
6331 {
6332 case IEMMODE_16BIT:
6333 switch (pVCpu->iem.s.enmEffAddrMode)
6334 {
6335 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
6336 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
6337 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
6338 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6339 }
6340 break;
6341
6342 case IEMMODE_32BIT:
6343 switch (pVCpu->iem.s.enmEffAddrMode)
6344 {
6345 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
6346 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
6347 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
6348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6349 }
6350 break;
6351
6352 case IEMMODE_64BIT:
6353 switch (pVCpu->iem.s.enmEffAddrMode)
6354 {
6355 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6356 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
6357 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
6358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6359 }
6360 break;
6361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6362 }
6363}
6364
6365#undef IEM_STOS_CASE
6366
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one non-REP LODS iteration: fetch the
 * ValBits-sized value from iEffSeg:[rSI] (rSI zero-extended from AddrBits),
 * store it into rAX, then step rSI by the operand size - backwards when
 * EFLAGS.DF is set, forwards otherwise. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6382
6383/**
6384 * @opcode 0xac
6385 */
6386FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
6387{
6388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6389
6390 /*
6391 * Use the C implementation if a repeat prefix is encountered.
6392 */
6393 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6394 {
6395 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
6396 switch (pVCpu->iem.s.enmEffAddrMode)
6397 {
6398 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
6399 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
6400 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
6401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6402 }
6403 }
6404 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
6405
6406 /*
6407 * Sharing case implementation with stos[wdq] below.
6408 */
6409 switch (pVCpu->iem.s.enmEffAddrMode)
6410 {
6411 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
6412 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
6413 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
6414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6415 }
6416}
6417
6418
6419/**
6420 * @opcode 0xad
6421 */
6422FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
6423{
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425
6426 /*
6427 * Use the C implementation if a repeat prefix is encountered.
6428 */
6429 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6430 {
6431 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
6432 switch (pVCpu->iem.s.enmEffOpSize)
6433 {
6434 case IEMMODE_16BIT:
6435 switch (pVCpu->iem.s.enmEffAddrMode)
6436 {
6437 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
6438 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
6439 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
6440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6441 }
6442 break;
6443 case IEMMODE_32BIT:
6444 switch (pVCpu->iem.s.enmEffAddrMode)
6445 {
6446 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
6447 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
6448 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
6449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6450 }
6451 case IEMMODE_64BIT:
6452 switch (pVCpu->iem.s.enmEffAddrMode)
6453 {
6454 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
6455 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
6456 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP, iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
6457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6458 }
6459 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6460 }
6461 }
6462 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
6463
6464 /*
6465 * Annoying double switch here.
6466 * Using ugly macro for implementing the cases, sharing it with lodsb.
6467 */
6468 switch (pVCpu->iem.s.enmEffOpSize)
6469 {
6470 case IEMMODE_16BIT:
6471 switch (pVCpu->iem.s.enmEffAddrMode)
6472 {
6473 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
6474 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
6475 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
6476 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6477 }
6478 break;
6479
6480 case IEMMODE_32BIT:
6481 switch (pVCpu->iem.s.enmEffAddrMode)
6482 {
6483 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
6484 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
6485 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
6486 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6487 }
6488 break;
6489
6490 case IEMMODE_64BIT:
6491 switch (pVCpu->iem.s.enmEffAddrMode)
6492 {
6493 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6494 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
6495 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
6496 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6497 }
6498 break;
6499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6500 }
6501}
6502
6503#undef IEM_LODS_CASE
6504
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one non-REP SCAS iteration: fetch the
 * ValBits-sized value at ES:[rDI] (rDI zero-extended from AddrBits), compare
 * it against rAX via the CMP worker (updating EFLAGS only), then step rDI by
 * the operand size - backwards when EFLAGS.DF is set, forwards otherwise. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END();
6526
6527/**
6528 * @opcode 0xae
6529 */
6530FNIEMOP_DEF(iemOp_scasb_AL_Xb)
6531{
6532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6533
6534 /*
6535 * Use the C implementation if a repeat prefix is encountered.
6536 */
6537 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6538 {
6539 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
6540 switch (pVCpu->iem.s.enmEffAddrMode)
6541 {
6542 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m16);
6543 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m32);
6544 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_al_m64);
6545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6546 }
6547 }
6548 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6549 {
6550 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
6551 switch (pVCpu->iem.s.enmEffAddrMode)
6552 {
6553 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m16);
6554 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m32);
6555 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_al_m64);
6556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6557 }
6558 }
6559 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
6560
6561 /*
6562 * Sharing case implementation with stos[wdq] below.
6563 */
6564 switch (pVCpu->iem.s.enmEffAddrMode)
6565 {
6566 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
6567 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
6568 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
6569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6570 }
6571}
6572
6573
6574/**
6575 * @opcode 0xaf
6576 */
6577FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
6578{
6579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6580
6581 /*
6582 * Use the C implementation if a repeat prefix is encountered.
6583 */
6584 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6585 {
6586 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
6587 switch (pVCpu->iem.s.enmEffOpSize)
6588 {
6589 case IEMMODE_16BIT:
6590 switch (pVCpu->iem.s.enmEffAddrMode)
6591 {
6592 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m16);
6593 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m32);
6594 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_ax_m64);
6595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6596 }
6597 break;
6598 case IEMMODE_32BIT:
6599 switch (pVCpu->iem.s.enmEffAddrMode)
6600 {
6601 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m16);
6602 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m32);
6603 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_eax_m64);
6604 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6605 }
6606 case IEMMODE_64BIT:
6607 switch (pVCpu->iem.s.enmEffAddrMode)
6608 {
6609 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
6610 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m32);
6611 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repe_scas_rax_m64);
6612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6613 }
6614 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6615 }
6616 }
6617 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6618 {
6619 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
6620 switch (pVCpu->iem.s.enmEffOpSize)
6621 {
6622 case IEMMODE_16BIT:
6623 switch (pVCpu->iem.s.enmEffAddrMode)
6624 {
6625 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m16);
6626 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m32);
6627 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_ax_m64);
6628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6629 }
6630 break;
6631 case IEMMODE_32BIT:
6632 switch (pVCpu->iem.s.enmEffAddrMode)
6633 {
6634 case IEMMODE_16BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m16);
6635 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m32);
6636 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_eax_m64);
6637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6638 }
6639 case IEMMODE_64BIT:
6640 switch (pVCpu->iem.s.enmEffAddrMode)
6641 {
6642 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
6643 case IEMMODE_32BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m32);
6644 case IEMMODE_64BIT: IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_repne_scas_rax_m64);
6645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6646 }
6647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6648 }
6649 }
6650 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
6651
6652 /*
6653 * Annoying double switch here.
6654 * Using ugly macro for implementing the cases, sharing it with scasb.
6655 */
6656 switch (pVCpu->iem.s.enmEffOpSize)
6657 {
6658 case IEMMODE_16BIT:
6659 switch (pVCpu->iem.s.enmEffAddrMode)
6660 {
6661 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
6662 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
6663 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
6664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6665 }
6666 break;
6667
6668 case IEMMODE_32BIT:
6669 switch (pVCpu->iem.s.enmEffAddrMode)
6670 {
6671 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
6672 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
6673 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
6674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6675 }
6676 break;
6677
6678 case IEMMODE_64BIT:
6679 switch (pVCpu->iem.s.enmEffAddrMode)
6680 {
6681 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6682 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
6683 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
6684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6685 }
6686 break;
6687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6688 }
6689}
6690
6691#undef IEM_SCAS_CASE
6692
6693/**
6694 * Common 'mov r8, imm8' helper.
6695 */
6696FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
6697{
6698 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6700
6701 IEM_MC_BEGIN(0, 1);
6702 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
6703 IEM_MC_STORE_GREG_U8(iReg, u8Value);
6704 IEM_MC_ADVANCE_RIP_AND_FINISH();
6705 IEM_MC_END();
6706}
6707
6708
6709/**
6710 * @opcode 0xb0
6711 */
6712FNIEMOP_DEF(iemOp_mov_AL_Ib)
6713{
6714 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
6715 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6716}
6717
6718
6719/**
6720 * @opcode 0xb1
6721 */
6722FNIEMOP_DEF(iemOp_CL_Ib)
6723{
6724 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
6725 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6726}
6727
6728
6729/**
6730 * @opcode 0xb2
6731 */
6732FNIEMOP_DEF(iemOp_DL_Ib)
6733{
6734 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
6735 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6736}
6737
6738
6739/**
6740 * @opcode 0xb3
6741 */
6742FNIEMOP_DEF(iemOp_BL_Ib)
6743{
6744 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
6745 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6746}
6747
6748
6749/**
6750 * @opcode 0xb4
6751 */
6752FNIEMOP_DEF(iemOp_mov_AH_Ib)
6753{
6754 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
6755 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6756}
6757
6758
6759/**
6760 * @opcode 0xb5
6761 */
6762FNIEMOP_DEF(iemOp_CH_Ib)
6763{
6764 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
6765 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6766}
6767
6768
6769/**
6770 * @opcode 0xb6
6771 */
6772FNIEMOP_DEF(iemOp_DH_Ib)
6773{
6774 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
6775 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6776}
6777
6778
6779/**
6780 * @opcode 0xb7
6781 */
6782FNIEMOP_DEF(iemOp_BH_Ib)
6783{
6784 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
6785 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6786}
6787
6788
6789/**
6790 * Common 'mov regX,immX' helper.
6791 */
6792FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
6793{
6794 switch (pVCpu->iem.s.enmEffOpSize)
6795 {
6796 case IEMMODE_16BIT:
6797 {
6798 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6800
6801 IEM_MC_BEGIN(0, 1);
6802 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
6803 IEM_MC_STORE_GREG_U16(iReg, u16Value);
6804 IEM_MC_ADVANCE_RIP_AND_FINISH();
6805 IEM_MC_END();
6806 break;
6807 }
6808
6809 case IEMMODE_32BIT:
6810 {
6811 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6813
6814 IEM_MC_BEGIN(0, 1);
6815 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
6816 IEM_MC_STORE_GREG_U32(iReg, u32Value);
6817 IEM_MC_ADVANCE_RIP_AND_FINISH();
6818 IEM_MC_END();
6819 break;
6820 }
6821 case IEMMODE_64BIT:
6822 {
6823 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
6824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6825
6826 IEM_MC_BEGIN(0, 1);
6827 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
6828 IEM_MC_STORE_GREG_U64(iReg, u64Value);
6829 IEM_MC_ADVANCE_RIP_AND_FINISH();
6830 IEM_MC_END();
6831 break;
6832 }
6833 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6834 }
6835}
6836
6837
6838/**
6839 * @opcode 0xb8
6840 */
6841FNIEMOP_DEF(iemOp_eAX_Iv)
6842{
6843 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
6844 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6845}
6846
6847
6848/**
6849 * @opcode 0xb9
6850 */
6851FNIEMOP_DEF(iemOp_eCX_Iv)
6852{
6853 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
6854 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6855}
6856
6857
6858/**
6859 * @opcode 0xba
6860 */
6861FNIEMOP_DEF(iemOp_eDX_Iv)
6862{
6863 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
6864 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6865}
6866
6867
6868/**
6869 * @opcode 0xbb
6870 */
6871FNIEMOP_DEF(iemOp_eBX_Iv)
6872{
6873 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
6874 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6875}
6876
6877
6878/**
6879 * @opcode 0xbc
6880 */
6881FNIEMOP_DEF(iemOp_eSP_Iv)
6882{
6883 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
6884 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6885}
6886
6887
6888/**
6889 * @opcode 0xbd
6890 */
6891FNIEMOP_DEF(iemOp_eBP_Iv)
6892{
6893 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
6894 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6895}
6896
6897
6898/**
6899 * @opcode 0xbe
6900 */
6901FNIEMOP_DEF(iemOp_eSI_Iv)
6902{
6903 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
6904 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6905}
6906
6907
6908/**
6909 * @opcode 0xbf
6910 */
6911FNIEMOP_DEF(iemOp_eDI_Iv)
6912{
6913 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
6914 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6915}
6916
6917
6918/**
6919 * @opcode 0xc0
6920 */
6921FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
6922{
6923 IEMOP_HLP_MIN_186();
6924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6925 PCIEMOPSHIFTSIZES pImpl;
6926 switch (IEM_GET_MODRM_REG_8(bRm))
6927 {
6928 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
6929 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
6930 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
6931 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
6932 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
6933 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
6934 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
6935 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
6936 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6937 }
6938 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6939
6940 if (IEM_IS_MODRM_REG_MODE(bRm))
6941 {
6942 /* register */
6943 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6945 IEM_MC_BEGIN(3, 0);
6946 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6947 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6948 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6949 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6950 IEM_MC_REF_EFLAGS(pEFlags);
6951 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6952 IEM_MC_ADVANCE_RIP_AND_FINISH();
6953 IEM_MC_END();
6954 }
6955 else
6956 {
6957 /* memory */
6958 IEM_MC_BEGIN(3, 2);
6959 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6960 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6961 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6963
6964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6965 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6966 IEM_MC_ASSIGN(cShiftArg, cShift);
6967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6968 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6969 IEM_MC_FETCH_EFLAGS(EFlags);
6970 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6971
6972 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6973 IEM_MC_COMMIT_EFLAGS(EFlags);
6974 IEM_MC_ADVANCE_RIP_AND_FINISH();
6975 IEM_MC_END();
6976 }
6977}
6978
6979
6980/**
6981 * @opcode 0xc1
6982 */
6983FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6984{
6985 IEMOP_HLP_MIN_186();
6986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6987 PCIEMOPSHIFTSIZES pImpl;
6988 switch (IEM_GET_MODRM_REG_8(bRm))
6989 {
6990 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6991 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6992 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6993 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6994 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6995 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6996 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6997 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
6998 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6999 }
7000 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7001
7002 if (IEM_IS_MODRM_REG_MODE(bRm))
7003 {
7004 /* register */
7005 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7007 switch (pVCpu->iem.s.enmEffOpSize)
7008 {
7009 case IEMMODE_16BIT:
7010 IEM_MC_BEGIN(3, 0);
7011 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7012 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7013 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7014 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7015 IEM_MC_REF_EFLAGS(pEFlags);
7016 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7017 IEM_MC_ADVANCE_RIP_AND_FINISH();
7018 IEM_MC_END();
7019 break;
7020
7021 case IEMMODE_32BIT:
7022 IEM_MC_BEGIN(3, 0);
7023 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7024 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7025 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7026 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7027 IEM_MC_REF_EFLAGS(pEFlags);
7028 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7029 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7030 IEM_MC_ADVANCE_RIP_AND_FINISH();
7031 IEM_MC_END();
7032 break;
7033
7034 case IEMMODE_64BIT:
7035 IEM_MC_BEGIN(3, 0);
7036 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7037 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7038 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7039 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7040 IEM_MC_REF_EFLAGS(pEFlags);
7041 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7042 IEM_MC_ADVANCE_RIP_AND_FINISH();
7043 IEM_MC_END();
7044 break;
7045
7046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7047 }
7048 }
7049 else
7050 {
7051 /* memory */
7052 switch (pVCpu->iem.s.enmEffOpSize)
7053 {
7054 case IEMMODE_16BIT:
7055 IEM_MC_BEGIN(3, 2);
7056 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7057 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7058 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7060
7061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7062 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7063 IEM_MC_ASSIGN(cShiftArg, cShift);
7064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7065 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7066 IEM_MC_FETCH_EFLAGS(EFlags);
7067 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7068
7069 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7070 IEM_MC_COMMIT_EFLAGS(EFlags);
7071 IEM_MC_ADVANCE_RIP_AND_FINISH();
7072 IEM_MC_END();
7073 break;
7074
7075 case IEMMODE_32BIT:
7076 IEM_MC_BEGIN(3, 2);
7077 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7078 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7079 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7081
7082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7083 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7084 IEM_MC_ASSIGN(cShiftArg, cShift);
7085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7086 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7087 IEM_MC_FETCH_EFLAGS(EFlags);
7088 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7089
7090 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7091 IEM_MC_COMMIT_EFLAGS(EFlags);
7092 IEM_MC_ADVANCE_RIP_AND_FINISH();
7093 IEM_MC_END();
7094 break;
7095
7096 case IEMMODE_64BIT:
7097 IEM_MC_BEGIN(3, 2);
7098 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7099 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7100 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7102
7103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7104 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7105 IEM_MC_ASSIGN(cShiftArg, cShift);
7106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7107 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7108 IEM_MC_FETCH_EFLAGS(EFlags);
7109 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7110
7111 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7112 IEM_MC_COMMIT_EFLAGS(EFlags);
7113 IEM_MC_ADVANCE_RIP_AND_FINISH();
7114 IEM_MC_END();
7115 break;
7116
7117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7118 }
7119 }
7120}
7121
7122
7123/**
7124 * @opcode 0xc2
7125 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    /* Near return, popping an additional Iw bytes of arguments off the stack.
       Decoding only happens here; the stack pop and RIP update are deferred to
       the operand-size specific C implementation (IEM_CIMPL_F_BRANCH). */
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    /* In 64-bit mode the default operand size is 64-bit and Intel CPUs ignore
       the operand size prefix for near returns. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7143
7144
7145/**
7146 * @opcode 0xc3
7147 */
FNIEMOP_DEF(iemOp_retn)
{
    /* Plain near return (no argument bytes to pop); defers to the
       operand-size specific C implementation like iemOp_retn_Iw. */
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7164
7165
7166/**
7167 * @opcode 0xc4
7168 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction (0xc4) is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        /* Treat the byte as a 3-byte VEX prefix (C4h). */
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* VEX.W is only honoured in 64-bit mode. */
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* Extract the inverted REX.R/X/B bits from byte 1 and the inverted
               vvvv register specifier, length and pp prefix from byte 2. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* Dispatch on the mmmmm (opcode map) field. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Legacy path: far pointer load into ES + general register. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
7235
7236
7237/**
7238 * @opcode 0xc5
7239 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction (0xc5) is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Treat the byte as a 2-byte VEX prefix (C5h); only map 1 (0x0f)
           is reachable via this encoding. */
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* Extract inverted REX.R, inverted vvvv, length and pp prefix from
               the single payload byte. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    /* Legacy path: far pointer load into DS + general register. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
7280
7281
7282/**
7283 * @opcode 0xc6
7284 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    /* Group 11: mov Eb,Ib is the only valid /reg encoding (/0); the rest #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Note: one immediate byte follows the ModR/M operand bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7315
7316
7317/**
7318 * @opcode 0xc7
7319 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    /* Group 11: mov Ev,Iz is the only valid /reg encoding (/0); the rest #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit form takes a sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* 2 immediate bytes follow the ModR/M operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* 4 immediate bytes follow the ModR/M operand bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* 4 immediate bytes (sign-extended to 64 bits) follow. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7404
7405
7406
7407
7408/**
7409 * @opcode 0xc8
7410 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    /* ENTER: create a stack frame of cbFrame bytes with u8NestingLevel levels;
       all the stack work happens in the deferred C implementation. */
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0, iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
7421
7422
7423/**
7424 * @opcode 0xc9
7425 */
FNIEMOP_DEF(iemOp_leave)
{
    /* LEAVE: tear down the current stack frame (SP := BP, pop BP);
       deferred to the C implementation. */
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0, iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
7434
7435
7436/**
7437 * @opcode 0xca
7438 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    /* Far return popping Iw extra bytes; may change CPU mode via CS reload,
       hence IEM_CIMPL_F_MODE in addition to the branch flag. */
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH, iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
7446
7447
7448/**
7449 * @opcode 0xcb
7450 */
FNIEMOP_DEF(iemOp_retf)
{
    /* Far return without extra pop bytes; shares iemCImpl_retf with the
       Iw form, passing 0 for the byte count. */
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH, iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
7457
7458
7459/**
7460 * @opcode 0xcc
7461 */
FNIEMOP_DEF(iemOp_int3)
{
    /* Breakpoint instruction: raises #BP via the common software interrupt
       C implementation (distinguished from "int 3" by IEMINT_INT3). */
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
7469
7470
7471/**
7472 * @opcode 0xcd
7473 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    /* Software interrupt with an immediate vector number (int n). */
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, u8Int, IEMINT_INTN);
}
7482
7483
7484/**
7485 * @opcode 0xce
7486 */
FNIEMOP_DEF(iemOp_into)
{
    /* INTO: raise #OF if EFLAGS.OF is set (checked by iemCImpl_int via
       IEMINT_INTO); the encoding itself is invalid in 64-bit mode. */
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
}
7494
7495
7496/**
7497 * @opcode 0xcf
7498 */
FNIEMOP_DEF(iemOp_iret)
{
    /* Interrupt return; can switch mode and rewrite RFLAGS wholesale, so the
       full set of CIMPL side-effect flags is supplied. */
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
7506
7507
7508/**
7509 * @opcode 0xd0
7510 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    /* Shift/rotate group 2 on a byte operand with a fixed count of 1.
       The /reg field selects the operation; /6 is undefined (#UD). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF are left in implementation-specific states by these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read-write, shift in place, commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7564
7565
7566
7567/**
7568 * @opcode 0xd1
7569 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    /* Shift/rotate group 2 on a word/dword/qword operand with a fixed count
       of 1.  The /reg field selects the operation; /6 is undefined (#UD). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF/AF are left in implementation-specific states by these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read-write, shift in place, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7700
7701
7702/**
7703 * @opcode 0xd2
7704 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    /* Shift/rotate group 2 on a byte operand, count taken from CL.
       The /reg field selects the operation; /6 is undefined (#UD). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF/AF are left in implementation-specific states by these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the byte read-write, shift in place, commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
7760
7761
7762/**
7763 * @opcode 0xd3
7764 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /* Shift/rotate group 2 on a word/dword/qword operand, count taken from
       CL.  The /reg field selects the operation; /6 is undefined (#UD). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF/AF are left in implementation-specific states by these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read-write, shift in place, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7901
7902/**
7903 * @opcode 0xd4
7904 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    /* ASCII adjust AX after multiply; the immediate is the divisor (usually
       10).  A zero immediate raises #DE here at decode time; the instruction
       is invalid in 64-bit mode. */
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aam, bImm);
}
7915
7916
7917/**
7918 * @opcode 0xd5
7919 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    /* ASCII adjust AX before division; the immediate is the multiplier
       (usually 10).  Invalid in 64-bit mode. */
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, iemCImpl_aad, bImm);
}
7928
7929
7930/**
7931 * @opcode 0xd6
7932 */
FNIEMOP_DEF(iemOp_salc)
{
    /* Undocumented instruction: set AL from carry (AL = CF ? 0xff : 0x00).
       Invalid in 64-bit mode. */
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7948
7949
7950/**
7951 * @opcode 0xd7
7952 */
7953FNIEMOP_DEF(iemOp_xlat)
7954{
7955 IEMOP_MNEMONIC(xlat, "xlat");
7956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7957 switch (pVCpu->iem.s.enmEffAddrMode)
7958 {
7959 case IEMMODE_16BIT:
7960 IEM_MC_BEGIN(2, 0);
7961 IEM_MC_LOCAL(uint8_t, u8Tmp);
7962 IEM_MC_LOCAL(uint16_t, u16Addr);
7963 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
7964 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
7965 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
7966 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7967 IEM_MC_ADVANCE_RIP_AND_FINISH();
7968 IEM_MC_END();
7969 break;
7970
7971 case IEMMODE_32BIT:
7972 IEM_MC_BEGIN(2, 0);
7973 IEM_MC_LOCAL(uint8_t, u8Tmp);
7974 IEM_MC_LOCAL(uint32_t, u32Addr);
7975 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
7976 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
7977 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
7978 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7979 IEM_MC_ADVANCE_RIP_AND_FINISH();
7980 IEM_MC_END();
7981 break;
7982
7983 case IEMMODE_64BIT:
7984 IEM_MC_BEGIN(2, 0);
7985 IEM_MC_LOCAL(uint8_t, u8Tmp);
7986 IEM_MC_LOCAL(uint64_t, u64Addr);
7987 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
7988 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
7989 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
7990 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7991 IEM_MC_ADVANCE_RIP_AND_FINISH();
7992 IEM_MC_END();
7993 break;
7994
7995 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7996 }
7997}
7998
7999
8000/**
8001 * Common worker for FPU instructions working on ST0 and STn, and storing the
8002 * result in ST0.
8003 *
8004 * @param bRm Mod R/M byte.
8005 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8006 */
8007FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8008{
8009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8010
8011 IEM_MC_BEGIN(3, 1);
8012 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8013 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8014 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8015 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8016
8017 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8018 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8019 IEM_MC_PREPARE_FPU_USAGE();
8020 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8021 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8022 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8023 } IEM_MC_ELSE() {
8024 IEM_MC_FPU_STACK_UNDERFLOW(0);
8025 } IEM_MC_ENDIF();
8026 IEM_MC_ADVANCE_RIP_AND_FINISH();
8027
8028 IEM_MC_END();
8029}
8030
8031
8032/**
8033 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8034 * flags.
8035 *
8036 * @param bRm Mod R/M byte.
8037 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8038 */
8039FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8040{
8041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8042
8043 IEM_MC_BEGIN(3, 1);
8044 IEM_MC_LOCAL(uint16_t, u16Fsw);
8045 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8046 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8047 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8048
8049 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8050 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8051 IEM_MC_PREPARE_FPU_USAGE();
8052 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8053 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8054 IEM_MC_UPDATE_FSW(u16Fsw);
8055 } IEM_MC_ELSE() {
8056 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
8057 } IEM_MC_ENDIF();
8058 IEM_MC_ADVANCE_RIP_AND_FINISH();
8059
8060 IEM_MC_END();
8061}
8062
8063
8064/**
8065 * Common worker for FPU instructions working on ST0 and STn, only affecting
8066 * flags, and popping when done.
8067 *
8068 * @param bRm Mod R/M byte.
8069 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8070 */
8071FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8072{
8073 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8074
8075 IEM_MC_BEGIN(3, 1);
8076 IEM_MC_LOCAL(uint16_t, u16Fsw);
8077 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8078 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8079 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8080
8081 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8082 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8083 IEM_MC_PREPARE_FPU_USAGE();
8084 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8085 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8086 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
8087 } IEM_MC_ELSE() {
8088 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
8089 } IEM_MC_ENDIF();
8090 IEM_MC_ADVANCE_RIP_AND_FINISH();
8091
8092 IEM_MC_END();
8093}
8094
8095
/** Opcode 0xd8 11/0.  FADD ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8102
8103
/** Opcode 0xd8 11/1.  FMUL ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8110
8111
/** Opcode 0xd8 11/2.  FCOM ST0,STn - compare, updates FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8118
8119
/** Opcode 0xd8 11/3.  FCOMP ST0,STn - compare, updates FSW only, then pops.
 *  Same assembly worker as FCOM; the pop is done by the MC helper. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8126
8127
/** Opcode 0xd8 11/4.  FSUB ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8134
8135
/** Opcode 0xd8 11/5.  FSUBR ST0,STn - reversed subtraction, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8142
8143
/** Opcode 0xd8 11/6.  FDIV ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8150
8151
/** Opcode 0xd8 11/7.  FDIVR ST0,STn - reversed division, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8158
8159
8160/**
8161 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8162 * the result in ST0.
8163 *
8164 * @param bRm Mod R/M byte.
8165 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8166 */
8167FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8168{
8169 IEM_MC_BEGIN(3, 3);
8170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8171 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8172 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8173 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8174 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8175 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8176
8177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8179
8180 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8181 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8182 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8183
8184 IEM_MC_PREPARE_FPU_USAGE();
8185 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8186 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8187 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8188 } IEM_MC_ELSE() {
8189 IEM_MC_FPU_STACK_UNDERFLOW(0);
8190 } IEM_MC_ENDIF();
8191 IEM_MC_ADVANCE_RIP_AND_FINISH();
8192
8193 IEM_MC_END();
8194}
8195
8196
/** Opcode 0xd8 !11/0.  FADD ST0,m32r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8203
8204
/** Opcode 0xd8 !11/1.  FMUL ST0,m32r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8211
8212
/** Opcode 0xd8 !11/2.  FCOM ST0,m32r - compare against a 32-bit real from
 *  memory; updates FSW only (DS/FPUDP recorded via the MEM_OP helpers). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8244
8245
/** Opcode 0xd8 !11/3.  FCOMP ST0,m32r - like FCOM m32r but pops the stack
 *  afterwards (also on the underflow path). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8277
8278
/** Opcode 0xd8 !11/4.  FSUB ST0,m32r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
8285
8286
/** Opcode 0xd8 !11/5.  FSUBR ST0,m32r - reversed subtraction, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
8293
8294
/** Opcode 0xd8 !11/6.  FDIV ST0,m32r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
8301
8302
/** Opcode 0xd8 !11/7.  FDIVR ST0,m32r - reversed division, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8309
8310
8311/**
8312 * @opcode 0xd8
8313 */
8314FNIEMOP_DEF(iemOp_EscF0)
8315{
8316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8317 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
8318
8319 if (IEM_IS_MODRM_REG_MODE(bRm))
8320 {
8321 switch (IEM_GET_MODRM_REG_8(bRm))
8322 {
8323 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
8324 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
8325 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
8326 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
8327 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
8328 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
8329 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
8330 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
8331 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8332 }
8333 }
8334 else
8335 {
8336 switch (IEM_GET_MODRM_REG_8(bRm))
8337 {
8338 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
8339 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
8340 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
8341 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
8342 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
8343 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
8344 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
8345 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
8346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8347 }
8348 }
8349}
8350
8351
/** Opcode 0xd9 /0 mem32real
 * FLD m32r - convert a 32-bit real from memory to R80 and push it.  A
 * non-empty ST7 (the register about to become the new top) signals a
 * stack push overflow instead.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {  /* ST7 is the slot the push will land in */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8383
8384
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r - store ST0 to memory as a 32-bit real.  On stack underflow with
 * the invalid-operation exception masked (FCW.IM), a negative QNaN is
 * written instead; otherwise nothing is committed. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {        /* masked #IA: store QNaN, else leave memory untouched */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8418
8419
/** Opcode 0xd9 !11/3
 * FSTP m32r - like FST m32r but pops the stack afterwards (also on the
 * underflow path). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {        /* masked #IA: store QNaN, else leave memory untouched */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8453
8454
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment from memory; the layout
 * (14 vs 28 bytes) depends on the effective operand size, which is passed
 * to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
8471
8472
8473/** Opcode 0xd9 !11/5 */
8474FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
8475{
8476 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
8477 IEM_MC_BEGIN(1, 1);
8478 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8479 IEM_MC_ARG(uint16_t, u16Fsw, 0);
8480 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8482 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8483 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8484 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8485 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, iemCImpl_fldcw, u16Fsw);
8486 IEM_MC_END();
8487}
8488
8489
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte - store the FPU environment to memory; the layout
 * depends on the effective operand size, passed to the C implementation.
 * NOTE(review): the mnemonic/stats name says "fstenv" while the function
 * models FNSTENV (no wait) - presumably intentional, verify against the
 * stats naming convention. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
8506
8507
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the FPU control word to memory; no exception
 * checks beyond \#NM, handled entirely inline. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8524
8525
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does nothing except updating FOP/FPUIP/FPUCS (and raising the
 * usual \#NM/\#MF conditions). */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8542
8543
/** Opcode 0xd9 11/0 stN
 * FLD STn - push a copy of STn onto the stack; an empty STn yields a push
 * underflow, a full stack is handled inside IEM_MC_PUSH_FPU_RESULT. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8569
8570
/** Opcode 0xd9 11/3 stN
 * FXCH STn - exchange ST0 and STn.  The non-empty path is done inline
 * (swap via FpuRes with C1 set); the underflow path defers to a C
 * implementation because the masked response is more involved. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);   /* old ST0 -> STn */
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);                                     /* old STn -> ST0 */
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8600
8601
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STn - copy ST0 to STn and pop.  The iDstReg == 0 case is special
 * cased because "fstp st0,st0" degenerates to a plain pop (the well-known
 * 'ffreep st0' idiom). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (FSW update with no store). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Regular case: copy ST0 into iDstReg, then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8647
8648
8649/**
8650 * Common worker for FPU instructions working on ST0 and replaces it with the
8651 * result, i.e. unary operators.
8652 *
8653 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8654 */
8655FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
8656{
8657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8658
8659 IEM_MC_BEGIN(2, 1);
8660 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8661 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8662 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8663
8664 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8665 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8666 IEM_MC_PREPARE_FPU_USAGE();
8667 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8668 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
8669 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8670 } IEM_MC_ELSE() {
8671 IEM_MC_FPU_STACK_UNDERFLOW(0);
8672 } IEM_MC_ENDIF();
8673 IEM_MC_ADVANCE_RIP_AND_FINISH();
8674
8675 IEM_MC_END();
8676}
8677
8678
/** Opcode 0xd9 0xe0.  FCHS - change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
8685
8686
/** Opcode 0xd9 0xe1.  FABS - absolute value of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
8693
8694
/** Opcode 0xd9 0xe4.
 * FTST - compare ST0 against 0.0; updates FSW only, no register written. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);  /* no destination register */
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8719
8720
/** Opcode 0xd9 0xe5.
 * FXAM - examine/classify ST0 into C0-C3.  Note: unlike the other unary
 * workers this uses IEM_MC_REF_FPUREG (not NOT_EMPTY), i.e. the assembly
 * worker is called even for an empty ST0 so it can report the empty class. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8742
8743
8744/**
8745 * Common worker for FPU instructions pushing a constant onto the FPU stack.
8746 *
8747 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8748 */
8749FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
8750{
8751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8752
8753 IEM_MC_BEGIN(1, 1);
8754 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8755 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8756
8757 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8758 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8759 IEM_MC_PREPARE_FPU_USAGE();
8760 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
8761 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
8762 IEM_MC_PUSH_FPU_RESULT(FpuRes);
8763 } IEM_MC_ELSE() {
8764 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
8765 } IEM_MC_ENDIF();
8766 IEM_MC_ADVANCE_RIP_AND_FINISH();
8767
8768 IEM_MC_END();
8769}
8770
8771
/** Opcode 0xd9 0xe8.  FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
8778
8779
/** Opcode 0xd9 0xe9.  FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
8786
8787
/** Opcode 0xd9 0xea.  FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
8794
/** Opcode 0xd9 0xeb.  FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
8801
8802
/** Opcode 0xd9 0xec.  FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
8809
/** Opcode 0xd9 0xed.  FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
8816
8817
/** Opcode 0xd9 0xee.  FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
8824
8825
/** Opcode 0xd9 0xf0.
 *
 * F2XM1 - computes 2^ST0 - 1, replacing ST0.  The instruction works on
 * values +1.0 thru -1.0, currently (the range on 287 & 8087 was +0.5 thru
 * 0.0 according to docs).  In addition it does appear to produce proper
 * results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
8839
8840
8841/**
8842 * Common worker for FPU instructions working on STn and ST0, storing the result
8843 * in STn, and popping the stack unless IE, DE or ZE was raised.
8844 *
8845 * @param bRm Mod R/M byte.
8846 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8847 */
8848FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8849{
8850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8851
8852 IEM_MC_BEGIN(3, 1);
8853 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8854 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8855 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8856 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8857
8858 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8859 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8860
8861 IEM_MC_PREPARE_FPU_USAGE();
8862 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
8863 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8864 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
8865 } IEM_MC_ELSE() {
8866 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
8867 } IEM_MC_ENDIF();
8868 IEM_MC_ADVANCE_RIP_AND_FINISH();
8869
8870 IEM_MC_END();
8871}
8872
8873
/** Opcode 0xd9 0xf1.  FYL2X - result stored in ST1, then the stack is popped
 *  (the hardcoded 1 selects ST1 as STn in the worker). */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
8880
8881
8882/**
8883 * Common worker for FPU instructions working on ST0 and having two outputs, one
8884 * replacing ST0 and one pushed onto the stack.
8885 *
8886 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8887 */
8888FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
8889{
8890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8891
8892 IEM_MC_BEGIN(2, 1);
8893 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
8894 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
8895 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8896
8897 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8898 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8899 IEM_MC_PREPARE_FPU_USAGE();
8900 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8901 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
8902 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
8903 } IEM_MC_ELSE() {
8904 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
8905 } IEM_MC_ENDIF();
8906 IEM_MC_ADVANCE_RIP_AND_FINISH();
8907
8908 IEM_MC_END();
8909}
8910
8911
/** Opcode 0xd9 0xf2.  FPTAN - two-output worker: replaces ST0, pushes 2nd result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3.  FPATAN - result goes to ST(1), stack is popped. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.  FXTRACT - two-output worker: replaces ST0, pushes 2nd result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.  FPREM1 - result stored in ST0, no pop. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8942
8943
/** Opcode 0xd9 0xf6.  FDECSTP - decrement the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Rotate TOP down by one; register contents are not modified. */
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* Zero condition bits per the note above. */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8965
8966
/** Opcode 0xd9 0xf7.  FINCSTP - increment the FPU stack TOP pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /* Rotate TOP up by one; register contents are not modified. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* Zero condition bits per the note above. */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8988
8989
/** Opcode 0xd9 0xf8.  FPREM - result stored in ST0, no pop. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9.  FYL2XP1 - result stored in ST(1), stack popped. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa.  FSQRT - unary on ST0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb.  FSINCOS - two-output worker: replaces ST0, pushes 2nd result. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc.  FRNDINT - unary on ST0. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd.  FSCALE - result stored in ST0, no pop. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe.  FSIN - unary on ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff.  FCOS - unary on ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9052
9053
/** Used by iemOp_EscF1 for the register form of escape 0xd9 with modrm bytes
 *  in the 0xe0..0xff range; index is (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
9090
9091
/**
 * @opcode 0xd9
 */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + modrm). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff: constants/transcendentals, via dispatch table. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: fld/fst/fstp m32r and environment/control-word ops. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9136
9137
/** Opcode 0xda 11/0.  FCMOVB - copy ST(i) to ST0 if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be non-empty; ST(i) is the referenced source. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/1.  FCMOVE - copy ST(i) to ST0 if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/2.  FCMOVBE - copy ST(i) to ST0 if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/3.  FCMOVU - copy ST(i) to ST0 if PF is set. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9244
9245
/**
 * Common worker for FPU instructions working on ST0 and ST1, only affecting
 * flags (FSW), and popping twice when done.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        /* Only FSW is updated, then both ST0 and ST1 are popped. */
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 0xe9.  FUCOMPP - unordered compare ST0 with ST1, pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9284
9285
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * @param   bRm         Mod R/M byte (memory operand).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9321
9322
/** Opcode 0xda !11/0.  FIADD m32i - ST0 += (int32) [mem]. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1.  FIMUL m32i - ST0 *= (int32) [mem]. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
9337
9338
/** Opcode 0xda !11/2.  FICOM m32i - compare ST0 with (int32) [mem], FSW only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* FSW update records the memory operand as the FPU data pointer. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda !11/3.  FICOMP m32i - same as FICOM but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9403
9404
/** Opcode 0xda !11/4.  FISUB m32i - ST0 -= (int32) [mem]. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5.  FISUBR m32i - ST0 = (int32) [mem] - ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6.  FIDIV m32i - ST0 /= (int32) [mem]. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7.  FIDIVR m32i - ST0 = (int32) [mem] / ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
9435
9436
/**
 * @opcode 0xda
 */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + modrm). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form: FCMOVcc and FUCOMPP (only bRm == 0xe9 of /5). */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 32-bit integer arithmetic/compares. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9478
9479
/** Opcode 0xdb !11/0.  FILD m32i - push (int32) [mem] converted to r80. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST7 (the register that becomes the new ST0) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9510
9511
/** Opcode 0xdb !11/1.  FISTTP m32i - store ST0 truncated to int32, pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, write the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9545
9546
/** Opcode 0xdb !11/2.  FIST m32i - store ST0 as int32 (rounded per FCW), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, write the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb !11/3.  FISTP m32i - same as FIST but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, write the integer-indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9615
9616
/** Opcode 0xdb !11/5.  FLD m80r - push an 80-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST7 (the register that becomes the new ST0) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9647
9648
/** Opcode 0xdb !11/7.  FSTP m80r - store ST0 as an 80-bit real, pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Explicit-size mapping of the 10-byte destination (note cbAlign). */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked, write a negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9682
9683
/** Opcode 0xdb 11/0.  FCMOVNB - copy ST(i) to ST0 if CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb 11/1.  FCMOVNE - copy ST(i) to ST0 if ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb 11/2.  FCMOVNBE - copy ST(i) to ST0 if both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xdb 11/3.  FCMOVNU - copy ST(i) to ST0 if PF is clear.
 * (Note: the function name carries a double 'n'; kept for existing callers.) */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9790
9791
/** Opcode 0xdb 0xe0.  FNENI - 8087 interrupt enable; ignored on later CPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe1.  FNDISI - 8087 interrupt disable; ignored on later CPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe2.  FNCLEX - clear FSW exception flags (no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9829
9830
/** Opcode 0xdb 0xe3.  FNINIT - reinitialize the FPU (no-wait form). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to C implementation; fCheckXcpts=false as this is the no-wait form. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, iemCImpl_finit, false /*fCheckXcpts*/);
}
9838
9839
/** Opcode 0xdb 0xe4.  FNSETPM - 80287 only; treated as a no-op here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xdb 0xe5.  FRSTPM - 80287XL only; raises \#UD here (see \#if 0). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
9867
9868
/** Opcode 0xdb 11/5.  FUCOMI - unordered compare ST0 with ST(i), sets EFLAGS. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    /* Shared CIMPL with FCOMI; the last argument packs the pop flag with the FPU opcode. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
9877
9878
9879/** Opcode 0xdb 11/6. */
9880FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
9881{
9882 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
9883 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
9884 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
9885 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
9886}
9887
9888
/**
 * @opcode 0xdb
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + modrm). */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register form: FCMOVNcc, control ops (/4) and FUCOMI/FCOMI. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 32-bit integer loads/stores and 80-bit real load/store. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9940
9941
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Raises \#NM / pending FPU exceptions first; if either source register is
 * empty, reports stack underflow on ST(i) instead of calling the assembly
 * worker.
 *
 * @param   bRm         Mod R/M byte (the r/m field selects ST(i)).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Operand 1 is ST(i), operand 2 is ST0; result goes back to ST(i). */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9973
9974
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST0 - result is stored in ST(i) by the common worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9981
9982
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST0 - result is stored in ST(i) by the common worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9989
9990
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST0 - reversed subtract, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9997
9998
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10005
10006
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST0 - reversed divide, result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10013
10014
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10021
10022
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * Calculates the effective address before finishing decoding, fetches the
 * m64r operand, and reports stack underflow on ST0 when it is empty.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 is both the first operand and the destination. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10057
10058
/** Opcode 0xdc !11/0.
 * FADD ST0,m64r - via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10065
10066
/** Opcode 0xdc !11/1.
 * FMUL ST0,m64r - via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10073
10074
/** Opcode 0xdc !11/2.
 * FCOM ST0,m64r - compare ST0 with a 64-bit real; only FSW is updated, the
 * stack is left untouched. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no stack register to flag, only FSW/FDS/FDP updating. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10106
10107
/** Opcode 0xdc !11/3.
 * FCOMP ST0,m64r - same as FCOM m64r but pops ST0 afterwards (the
 * _THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10139
10140
/** Opcode 0xdc !11/4.
 * FSUB ST0,m64r - via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10147
10148
/** Opcode 0xdc !11/5.
 * FSUBR ST0,m64r - via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10155
10156
/** Opcode 0xdc !11/6.
 * FDIV ST0,m64r - via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10163
10164
/** Opcode 0xdc !11/7.
 * FDIVR ST0,m64r - via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10171
10172
/**
 * @opcode 0xdc
 *
 * Escape opcode 0xdc decoder: register forms operate on ST(i),ST0; memory
 * forms operate on ST0 with an m64r operand.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10211
10212
/** Opcode 0xdd !11/0.
 * FLD m64r - push a 64-bit real (converted to 80-bit) onto the stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Push only if ST7 (the register becoming the new top) is free,
       otherwise it is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10243
10244
/** Opcode 0xdd !11/1.  (Header previously mislabelled !11/0; the 0xdd
 * dispatcher routes /1 here.)
 * FISTTP m64i - store ST0 truncated to int64 and pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with a masked invalid-op exception store the integer
           indefinite value (INT64_MIN), then flag the underflow and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10278
10279
/** Opcode 0xdd !11/2.  (Header previously mislabelled !11/0; the 0xdd
 * dispatcher routes /2 here.)
 * FST m64r - store ST0 as a 64-bit real, no pop. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with masked invalid-op exception store negative QNaN. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10313
10314
10315
10316
/** Opcode 0xdd !11/3.  (Header previously mislabelled !11/0; the 0xdd
 * dispatcher routes /3 here.)
 * FSTP m64r - store ST0 as a 64-bit real and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with masked invalid-op exception store negative QNaN. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10350
10351
/** Opcode 0xdd !11/4.  (Header previously mislabelled !11/0; the 0xdd
 * dispatcher routes /4 here.)
 * FRSTOR m94/108byte - restore the whole FPU state; deferred to C code. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10368
10369
/** Opcode 0xdd !11/6.  (Header previously mislabelled !11/0; the 0xdd
 * dispatcher routes /6 here.)
 * FNSAVE m94/108byte - save the whole FPU state; deferred to C code. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10386
/** Opcode 0xdd !11/7.  (Header previously mislabelled !11/0; the 0xdd
 * dispatcher routes /7 here.)
 * FNSTSW m16 - store the FPU status word without checking pending exceptions. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
10410
10411
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the register as empty; the stack top is unchanged. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10432
10433
/** Opcode 0xdd 11/2.  (Header previously said 11/1; the 0xdd dispatcher
 * routes /2 here, /1 being the reserved FXCH alias.)
 * FST ST(i) - copy ST0 into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST0 value in a result with a zero FSW so the plain
           store-result path can be reused. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10457
10458
/** Opcode 0xdd 11/4.  (Header previously said 11/3; the 0xdd dispatcher
 * routes /4 here.)
 * FUCOM ST0,ST(i) - unordered compare, no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
10465
10466
/** Opcode 0xdd 11/5.  (Header previously said 11/4; the 0xdd dispatcher
 * routes /5 here.)
 * FUCOMP ST0,ST(i) - unordered compare, then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
10473
10474
/**
 * @opcode 0xdd
 *
 * Escape opcode 0xdd decoder: register forms are FFREE/FST/FSTP/FUCOM(P);
 * memory forms are m64r loads/stores, FISTTP m64i, FRSTOR, FNSAVE and
 * FNSTSW m16.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10513
10514
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - via the popping variant of the ST(i)/ST0 worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
10521
10522
/** Opcode 0xde 11/1.  (Header previously said 11/0; the 0xde dispatcher
 * routes /1 here.)
 * FMULP ST(i),ST0 - via the popping variant of the ST(i)/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
10529
10530
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST0 with ST1, then pop both (double-pop worker). */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
10537
10538
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0 - via the popping variant of the ST(i)/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
10545
10546
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0 - via the popping variant of the ST(i)/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
10553
10554
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0 - via the popping variant of the ST(i)/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
10561
10562
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0 - via the popping variant of the ST(i)/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
10569
10570
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Fetches the signed 16-bit integer operand, calls the assembly
 * implementation and stores back to ST0; reports stack underflow on ST0
 * when it is empty.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10606
10607
/** Opcode 0xde !11/0.
 * FIADD m16i - via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
10614
10615
/** Opcode 0xde !11/1.
 * FIMUL m16i - via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
10622
10623
/** Opcode 0xde !11/2.
 * FICOM ST0,m16i - compare ST0 with a signed 16-bit integer; only FSW is
 * updated, the stack is left untouched. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10655
10656
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16i - same as FICOM m16i but pops ST0 afterwards (the
 * _THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10688
10689
/** Opcode 0xde !11/4.
 * FISUB m16i - via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
10696
10697
/** Opcode 0xde !11/5.
 * FISUBR m16i - via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
10704
10705
/** Opcode 0xde !11/6.
 * FIDIV m16i - via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
10712
10713
/** Opcode 0xde !11/7.
 * FIDIVR m16i - via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
10720
10721
/**
 * @opcode 0xde
 *
 * Escape opcode 0xde decoder: register forms are the popping arithmetic
 * instructions (FADDP..FDIVP) plus FCOMPP at 0xd9; memory forms are the
 * m16i integer arithmetic/compare instructions.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        /* /3 only encodes FCOMPP (ModR/M 0xd9); the rest is invalid. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10762
10763
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE + FINCSTP (mark
 * ST(i) empty, then increment the stack top). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10784
10785
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - copy the FPU status word to AX without checking pending
 * exceptions. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10801
10802
10803/** Opcode 0xdf 11/5. */
10804FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
10805{
10806 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
10807 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
10808 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
10809 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
10810}
10811
10812
/** Opcode 0xdf 11/6.
 * FCOMIP st0,st(i) - ordered compare updating EFLAGS, then pop (bit 31 of
 * the last argument requests the pop, the low bits carry the FPU opcode). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
10821
10822
/** Opcode 0xdf !11/0.
 * FILD m16i - load a 16-bit signed integer from memory, convert it to r80
 * and push it onto the FPU stack.  If the register that would become the new
 * top (register 7 relative to TOS) is occupied, a stack overflow is signalled
 * instead. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* room for the push? */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10853
10854
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST0 to memory as a 16-bit integer with truncation
 * (chop) rounding, then pop.  On stack underflow with FCW.IM masked, the
 * 16-bit integer indefinite value (INT16_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* ST0 empty: store integer indefinite if #IA is masked, then raise underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10888
10889
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST0 to memory as a 16-bit integer (rounding per FCW.RC),
 * without popping.  On stack underflow with FCW.IM masked, the 16-bit integer
 * indefinite value (INT16_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst); /* no pop */
    } IEM_MC_ELSE() {
        /* ST0 empty: store integer indefinite if #IA is masked, then raise underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10923
10924
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST0 to memory as a 16-bit integer (rounding per FCW.RC)
 * and pop.  Identical to FIST m16i above except for the pop (uses the
 * *_THEN_POP FSW update variants). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* ST0 empty: store integer indefinite if #IA is masked, then raise underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10958
10959
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value from memory, convert it to
 * r80 and push it onto the FPU stack.  Stack overflow is signalled when the
 * register that would become the new top is occupied. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* room for the push? */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10990
10991
/** Opcode 0xdf !11/5.
 * FILD m64i - load a 64-bit signed integer from memory, convert it to r80
 * and push it onto the FPU stack.  Same structure as FILD m16i, only the
 * operand width and AIMPL worker differ. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* room for the push? */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11022
11023
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST0 to memory as an 80-bit packed BCD value and pop.
 * On stack underflow with FCW.IM masked, the packed-BCD indefinite value is
 * stored instead.  Uses IEM_MC_MEM_MAP_EX to specify the mapping size and
 * alignment explicitly for the 10-byte destination. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* ST0 empty: store BCD indefinite if #IA is masked, then raise underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11057
11058
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST0 to memory as a 64-bit integer (rounding per FCW.RC)
 * and pop.  On stack underflow with FCW.IM masked, the 64-bit integer
 * indefinite value (INT64_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* ST0 empty: store integer indefinite if #IA is masked, then raise underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11092
11093
/**
 * @opcode 0xdf
 *
 * Escape opcode 0xdf decoder: dispatches on the ModR/M byte.  Register-form
 * (mod == 3) selects by reg field (with /4 restricted to 0xe0 = FNSTSW AX);
 * memory-form selects the integer/BCD load and store instructions.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only the 0xe0 encoding (FNSTSW AX) is valid for /4 */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11133
11134
/**
 * @opcode 0xe0
 *
 * LOOPNE Jb - decrement the counter register (CX/ECX/RCX per effective
 * address size) and take the short relative jump while the counter is
 * non-zero AND ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix picks which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11183
11184
/**
 * @opcode 0xe1
 *
 * LOOPE Jb - decrement the counter register (CX/ECX/RCX per effective
 * address size) and take the short relative jump while the counter is
 * non-zero AND ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix picks which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11233
11234
/**
 * @opcode 0xe2
 *
 * LOOP Jb - decrement the counter register (CX/ECX/RCX per effective
 * address size) and take the short relative jump while the counter is
 * non-zero.  When verbose logging is enabled, the tight self-branch form
 * (LOOP $-2) is short-circuited by zeroing the counter to keep the log
 * readable.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* -(instr length) == i8Imm means the jump targets this very instruction. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0); /* exhaust the loop in one go */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* The address-size prefix picks which counter register is used. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);

            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11321
11322
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb - short relative jump taken when the counter register
 * (selected by the effective address size) is zero; note the inverted
 * branch structure compared to the LOOP instructions above.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH(); /* CX != 0: fall through */
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm); /* CX == 0: jump */
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11368
11369
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the immediate port into AL.  Deferred to
 * iemCImpl_in; 0x80 in the last argument flags the immediate-port form. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11378
11379
/** Opcode 0xe5.
 * IN eAX,Ib - read a word or dword (per effective operand size) from the
 * immediate port into AX/EAX.  Deferred to iemCImpl_in (immediate form). */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11389
11390
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate port.  Deferred to iemCImpl_out
 * (0x80 flags the immediate-port form). */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11399
11400
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX (per effective operand size) to the immediate
 * port.  Deferred to iemCImpl_out (immediate form). */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
11410
11411
/**
 * @opcode 0xe8
 *
 * CALL Jv - near relative call.  The displacement width follows the
 * effective operand size; in 64-bit mode the 32-bit displacement is
 * sign-extended to 64 bits.  Deferred to the operand-size-specific
 * iemCImpl_call_rel_* workers.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit immediate, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11442
11443
/**
 * @opcode 0xe9
 *
 * JMP Jv - near relative jump with a 16- or 32-bit displacement; the 64-bit
 * operand size shares the 32-bit (sign-extended) immediate path.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT: /* same 32-bit displacement handling as 32-bit mode */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11475
11476
/**
 * @opcode 0xea
 *
 * JMP Ap - direct far jump (ptr16:16 / ptr16:32).  Invalid in 64-bit mode
 * (IEMOP_HLP_NO_64BIT).  The selector:offset pair is decoded here and the
 * rest is deferred to iemCImpl_FarJmp.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
}
11496
11497
/**
 * @opcode 0xeb
 *
 * JMP Jb - short relative jump with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
11512
11513
/** Opcode 0xec.
 * IN AL,DX - read one byte from the port in DX into AL.  Deferred to
 * iemCImpl_in_eAX_DX. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11521
11522
/** Opcode 0xed.
 * IN eAX,DX - read a word or dword (per effective operand size) from the
 * port in DX into AX/EAX.  Deferred to iemCImpl_in_eAX_DX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11532
11533
/** Opcode 0xee.
 * OUT DX,AL - write AL to the port in DX.  Deferred to iemCImpl_out_DX_eAX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT, iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
11541
11542
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the port in DX.
 * Deferred to iemCImpl_out_DX_eAX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
11552
11553
/**
 * @opcode 0xf0
 *
 * LOCK prefix - records the prefix (unless the execution mode says to
 * disregard LOCK) and recursively decodes the next opcode byte through the
 * one-byte opcode map.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11566
11567
/**
 * @opcode 0xf1
 *
 * INT1 / ICEBP - raises \#DB via the common software-interrupt C
 * implementation (IEMINT_INT1 distinguishes it from the INT n forms).
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
11582
11583
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix - records the prefix (clearing any earlier REPE),
 * selects prefix group index 3 for the 4-entry opcode tables, and
 * recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11601
11602
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix - records the prefix (clearing any earlier REPNE),
 * selects prefix group index 2 for the 4-entry opcode tables, and
 * recursively decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11620
11621
/**
 * @opcode 0xf4
 *
 * HLT - halt the CPU.  Deferred to iemCImpl_hlt; ends the current
 * translation block (IEM_CIMPL_F_END_TB) and may cause a VM exit.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, iemCImpl_hlt);
}
11631
11632
/**
 * @opcode 0xf5
 *
 * CMC - complement (toggle) the carry flag.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11645
11646
/**
 * Body of 'inc/dec/not/neg Eb'.
 *
 * Handles all three destination cases for a byte-sized unary group-3/5
 * operation: register direct, plain memory, and LOCK-prefixed memory (the
 * latter two differ only in which assembly worker is called).
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnNormalU8    Assembly worker for the non-locked cases.
 * @param   a_fnLockedU8    Assembly worker for the LOCK-prefixed memory case.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_BEGIN(2, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* Same as above, but with the locked assembly worker. */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
11705
11706
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Handles the register-direct and non-locked memory cases for the three
 * operand sizes.  NOTE: this macro deliberately ends inside the locked-memory
 * 'else' block; it must be followed by IEMOP_BODY_UNARY_Ev_LOCKED, which
 * supplies the locked workers and closes the braces.  A local @c bRm holding
 * the ModR/M byte must be in scope.
 *
 * @param   a_fnNormalU16   Assembly worker for the non-locked 16-bit cases.
 * @param   a_fnNormalU32   Assembly worker for the non-locked 32-bit cases.
 * @param   a_fnNormalU64   Assembly worker for the non-locked 64-bit cases.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero the high half */ \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 2); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
11822
/**
 * Body macro, part two: the LOCK-prefixed memory-target tail for the
 * one-operand Ev instructions (not/neg/inc/dec).  Pairs with
 * IEMOP_BODY_UNARY_Ev, which leaves an open 'else' scope that the two
 * trailing closing braces of this macro complete.
 *
 * @param   a_fnLockedU16   Interlocked 16-bit assembly worker.
 * @param   a_fnLockedU32   Interlocked 32-bit assembly worker.
 * @param   a_fnLockedU64   Interlocked 64-bit assembly worker.
 */
11823#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
11824    switch (pVCpu->iem.s.enmEffOpSize) \
11825    { \
11826        case IEMMODE_16BIT: \
11827            IEM_MC_BEGIN(2, 2); \
11828            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
11829            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
11830            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11831            \
11832            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11833            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
11834            IEM_MC_FETCH_EFLAGS(EFlags); \
11835            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
11836            \
11837            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
11838            IEM_MC_COMMIT_EFLAGS(EFlags); \
11839            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11840            IEM_MC_END(); \
11841            break; \
11842        \
11843        case IEMMODE_32BIT: \
11844            IEM_MC_BEGIN(2, 2); \
11845            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
11846            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
11847            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11848            \
11849            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11850            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
11851            IEM_MC_FETCH_EFLAGS(EFlags); \
11852            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
11853            \
11854            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
11855            IEM_MC_COMMIT_EFLAGS(EFlags); \
11856            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11857            IEM_MC_END(); \
11858            break; \
11859        \
11860        case IEMMODE_64BIT: \
11861            IEM_MC_BEGIN(2, 2); \
11862            IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
11863            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
11864            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
11865            \
11866            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
11867            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
11868            IEM_MC_FETCH_EFLAGS(EFlags); \
11869            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
11870            \
11871            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
11872            IEM_MC_COMMIT_EFLAGS(EFlags); \
11873            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
11874            IEM_MC_END(); \
11875            break; \
11876        \
11877        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
11878    } \
11879    } \
11880    } \
11881    (void)0
11882
11883
11884/**
11885 * @opmaps grp3_f6
11886 * @opcode /0
11887 * @todo also /1
 *
 * TEST Eb,Ib - ANDs the r/m8 operand with an immediate byte, updating only
 * EFLAGS; the operand itself is never written back.
11888 */
11889FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
11890{
11891    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
11892    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
11893
11894    if (IEM_IS_MODRM_REG_MODE(bRm))
11895    {
11896        /* register access */
11897        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11898        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11899
11900        IEM_MC_BEGIN(3, 0);
11901        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11902        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
11903        IEM_MC_ARG(uint32_t *, pEFlags, 2);
11904        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
11905        IEM_MC_REF_EFLAGS(pEFlags);
11906        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
11907        IEM_MC_ADVANCE_RIP_AND_FINISH();
11908        IEM_MC_END();
11909    }
11910    else
11911    {
11912        /* memory access. */
11913        IEM_MC_BEGIN(3, 2);
11914        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
11915        IEM_MC_ARG(uint8_t, u8Src, 1);
11916        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11917        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11918
        /* One immediate byte follows the ModR/M bytes, hence cbImm=1 here. */
11919        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
11920        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
11921        IEM_MC_ASSIGN(u8Src, u8Imm);
11922        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* test only reads the destination, so the mapping is read-only. */
11923        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11924        IEM_MC_FETCH_EFLAGS(EFlags);
11925        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
11926
11927        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
11928        IEM_MC_COMMIT_EFLAGS(EFlags);
11929        IEM_MC_ADVANCE_RIP_AND_FINISH();
11930        IEM_MC_END();
11931    }
11932}
11933
11934
11935/** Opcode 0xf6 /4, /5, /6 and /7. */
/*
 * Common worker for the byte-sized MUL/IMUL/DIV/IDIV forms: AX is the
 * implicit source/destination pair, and pfnU8 performs the arithmetic,
 * returning 0 on success or non-zero to raise \#DE (divide error).
 */
11936FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
11937{
11938    if (IEM_IS_MODRM_REG_MODE(bRm))
11939    {
11940        /* register access */
11941        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11942        IEM_MC_BEGIN(3, 1);
11943        IEM_MC_ARG(uint16_t *, pu16AX, 0);
11944        IEM_MC_ARG(uint8_t, u8Value, 1);
11945        IEM_MC_ARG(uint32_t *, pEFlags, 2);
11946        IEM_MC_LOCAL(int32_t, rc);
11947
11948        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
11949        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11950        IEM_MC_REF_EFLAGS(pEFlags);
11951        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises \#DE. */
11952        IEM_MC_IF_LOCAL_IS_Z(rc) {
11953            IEM_MC_ADVANCE_RIP_AND_FINISH();
11954        } IEM_MC_ELSE() {
11955            IEM_MC_RAISE_DIVIDE_ERROR();
11956        } IEM_MC_ENDIF();
11957
11958        IEM_MC_END();
11959    }
11960    else
11961    {
11962        /* memory access. */
11963        IEM_MC_BEGIN(3, 2);
11964        IEM_MC_ARG(uint16_t *, pu16AX, 0);
11965        IEM_MC_ARG(uint8_t, u8Value, 1);
11966        IEM_MC_ARG(uint32_t *, pEFlags, 2);
11967        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11968        IEM_MC_LOCAL(int32_t, rc);
11969
11970        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11971        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11972        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11973        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11974        IEM_MC_REF_EFLAGS(pEFlags);
11975        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else raises \#DE. */
11976        IEM_MC_IF_LOCAL_IS_Z(rc) {
11977            IEM_MC_ADVANCE_RIP_AND_FINISH();
11978        } IEM_MC_ELSE() {
11979            IEM_MC_RAISE_DIVIDE_ERROR();
11980        } IEM_MC_ENDIF();
11981
11982        IEM_MC_END();
11983    }
11984}
11985
11986
11987/** Opcode 0xf7 /4, /5, /6 and /7. */
/*
 * Common worker for the Ev-sized MUL/IMUL/DIV/IDIV forms.  The xAX:xDX
 * register pair is the implicit operand; pImpl supplies per-operand-size
 * assembly workers that return 0 on success or non-zero to raise \#DE.
 *
 * Fix: the register path used to call IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX()
 * both before the operand-size switch and again inside each case; the
 * redundant per-case calls have been dropped (matching iemOpCommonGrp3MulDivEb).
 */
11988FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11989{
11990    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11991
11992    if (IEM_IS_MODRM_REG_MODE(bRm))
11993    {
11994        /* register access */
11995        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11996        switch (pVCpu->iem.s.enmEffOpSize)
11997        {
11998            case IEMMODE_16BIT:
11999            {
12001                IEM_MC_BEGIN(4, 1);
12002                IEM_MC_ARG(uint16_t *, pu16AX, 0);
12003                IEM_MC_ARG(uint16_t *, pu16DX, 1);
12004                IEM_MC_ARG(uint16_t, u16Value, 2);
12005                IEM_MC_ARG(uint32_t *, pEFlags, 3);
12006                IEM_MC_LOCAL(int32_t, rc);
12007
12008                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
12009                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
12010                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
12011                IEM_MC_REF_EFLAGS(pEFlags);
12012                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* rc == 0 means success; anything else raises \#DE. */
12013                IEM_MC_IF_LOCAL_IS_Z(rc) {
12014                    IEM_MC_ADVANCE_RIP_AND_FINISH();
12015                } IEM_MC_ELSE() {
12016                    IEM_MC_RAISE_DIVIDE_ERROR();
12017                } IEM_MC_ENDIF();
12018
12019                IEM_MC_END();
12020                break;
12021            }
12022
12023            case IEMMODE_32BIT:
12024            {
12026                IEM_MC_BEGIN(4, 1);
12027                IEM_MC_ARG(uint32_t *, pu32AX, 0);
12028                IEM_MC_ARG(uint32_t *, pu32DX, 1);
12029                IEM_MC_ARG(uint32_t, u32Value, 2);
12030                IEM_MC_ARG(uint32_t *, pEFlags, 3);
12031                IEM_MC_LOCAL(int32_t, rc);
12032
12033                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
12034                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
12035                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
12036                IEM_MC_REF_EFLAGS(pEFlags);
12037                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
12038                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes through references must zero the high dwords. */
12039                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
12040                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
12041                    IEM_MC_ADVANCE_RIP_AND_FINISH();
12042                } IEM_MC_ELSE() {
12043                    IEM_MC_RAISE_DIVIDE_ERROR();
12044                } IEM_MC_ENDIF();
12045
12046                IEM_MC_END();
12047                break;
12048            }
12049
12050            case IEMMODE_64BIT:
12051            {
12053                IEM_MC_BEGIN(4, 1);
12054                IEM_MC_ARG(uint64_t *, pu64AX, 0);
12055                IEM_MC_ARG(uint64_t *, pu64DX, 1);
12056                IEM_MC_ARG(uint64_t, u64Value, 2);
12057                IEM_MC_ARG(uint32_t *, pEFlags, 3);
12058                IEM_MC_LOCAL(int32_t, rc);
12059
12060                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
12061                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
12062                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
12063                IEM_MC_REF_EFLAGS(pEFlags);
12064                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
12065                IEM_MC_IF_LOCAL_IS_Z(rc) {
12066                    IEM_MC_ADVANCE_RIP_AND_FINISH();
12067                } IEM_MC_ELSE() {
12068                    IEM_MC_RAISE_DIVIDE_ERROR();
12069                } IEM_MC_ENDIF();
12070
12071                IEM_MC_END();
12072                break;
12073            }
12074
12075            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12076        }
12077    }
12078    else
12079    {
12080        /* memory access. */
12081        switch (pVCpu->iem.s.enmEffOpSize)
12082        {
12083            case IEMMODE_16BIT:
12084            {
12085                IEM_MC_BEGIN(4, 2);
12086                IEM_MC_ARG(uint16_t *, pu16AX, 0);
12087                IEM_MC_ARG(uint16_t *, pu16DX, 1);
12088                IEM_MC_ARG(uint16_t, u16Value, 2);
12089                IEM_MC_ARG(uint32_t *, pEFlags, 3);
12090                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12091                IEM_MC_LOCAL(int32_t, rc);
12092
12093                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12094                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12095                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12096                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
12097                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
12098                IEM_MC_REF_EFLAGS(pEFlags);
12099                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
12100                IEM_MC_IF_LOCAL_IS_Z(rc) {
12101                    IEM_MC_ADVANCE_RIP_AND_FINISH();
12102                } IEM_MC_ELSE() {
12103                    IEM_MC_RAISE_DIVIDE_ERROR();
12104                } IEM_MC_ENDIF();
12105
12106                IEM_MC_END();
12107                break;
12108            }
12109
12110            case IEMMODE_32BIT:
12111            {
12112                IEM_MC_BEGIN(4, 2);
12113                IEM_MC_ARG(uint32_t *, pu32AX, 0);
12114                IEM_MC_ARG(uint32_t *, pu32DX, 1);
12115                IEM_MC_ARG(uint32_t, u32Value, 2);
12116                IEM_MC_ARG(uint32_t *, pEFlags, 3);
12117                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12118                IEM_MC_LOCAL(int32_t, rc);
12119
12120                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12121                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12122                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12123                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
12124                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
12125                IEM_MC_REF_EFLAGS(pEFlags);
12126                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
12127                IEM_MC_IF_LOCAL_IS_Z(rc) {
12128                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
12129                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
12130                    IEM_MC_ADVANCE_RIP_AND_FINISH();
12131                } IEM_MC_ELSE() {
12132                    IEM_MC_RAISE_DIVIDE_ERROR();
12133                } IEM_MC_ENDIF();
12134
12135                IEM_MC_END();
12136                break;
12137            }
12138
12139            case IEMMODE_64BIT:
12140            {
12141                IEM_MC_BEGIN(4, 2);
12142                IEM_MC_ARG(uint64_t *, pu64AX, 0);
12143                IEM_MC_ARG(uint64_t *, pu64DX, 1);
12144                IEM_MC_ARG(uint64_t, u64Value, 2);
12145                IEM_MC_ARG(uint32_t *, pEFlags, 3);
12146                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12147                IEM_MC_LOCAL(int32_t, rc);
12148
12149                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12150                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12151                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12152                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
12153                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
12154                IEM_MC_REF_EFLAGS(pEFlags);
12155                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
12156                IEM_MC_IF_LOCAL_IS_Z(rc) {
12157                    IEM_MC_ADVANCE_RIP_AND_FINISH();
12158                } IEM_MC_ELSE() {
12159                    IEM_MC_RAISE_DIVIDE_ERROR();
12160                } IEM_MC_ENDIF();
12161
12162                IEM_MC_END();
12163                break;
12164            }
12165
12166            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12167        }
12168    }
12169}
12170
12171
12172/**
12173 * @opmaps grp3_f6
12174 * @opcode /2
 *
 * NOT Eb - one's complement of the r/m8 operand; the body macro handles the
 * register form plus the plain and LOCK-prefixed memory forms.
12175 */
12176FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
12177{
12178    IEMOP_MNEMONIC(not_Eb, "not Eb");
12179    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
12180}
12181
12182
12183/**
12184 * @opmaps grp3_f6
12185 * @opcode /3
 *
 * NEG Eb - two's complement negation of the r/m8 operand.
12186 */
12187FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12188{
    /* NOTE(review): the stats token reads "net_Eb" - looks like a typo for
       "neg_Eb"; confirm nothing keys off the name before renaming it. */
12189    IEMOP_MNEMONIC(net_Eb, "neg Eb");
12190    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12191}
12192
12193
12194/**
12195 * @opcode 0xf6
 *
 * Group 3 dispatcher for the byte-sized forms, selecting the handler by the
 * reg field of the ModR/M byte.
12196 */
12197FNIEMOP_DEF(iemOp_Grp3_Eb)
12198{
12199    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12200    switch (IEM_GET_MODRM_REG_8(bRm))
12201    {
12202        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        /* /1 is routed to TEST as well - see the @todo on iemOp_grp3_test_Eb. */
12203        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12204        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
12205        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
12206        case 4:
12207            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
12208            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12209            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
12210        case 5:
12211            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
12212            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12213            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
12214        case 6:
12215            IEMOP_MNEMONIC(div_Eb, "div Eb");
12216            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12217            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
12218        case 7:
12219            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
12220            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12221            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
12222        IEM_NOT_REACHED_DEFAULT_CASE_RET();
12223    }
12224}
12225
12226
12227/** Opcode 0xf7 /0. */
/*
 * TEST Ev,Iz - ANDs the r/m operand with an immediate sized by the effective
 * operand size (sign-extended 32-bit immediate in 64-bit mode), updating only
 * EFLAGS; the operand is never written back.
 */
12228FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
12229{
12230    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
12231    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
12232
12233    if (IEM_IS_MODRM_REG_MODE(bRm))
12234    {
12235        /* register access */
12236        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12237        switch (pVCpu->iem.s.enmEffOpSize)
12238        {
12239            case IEMMODE_16BIT:
12240            {
12241                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12242                IEM_MC_BEGIN(3, 0);
12243                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12244                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
12245                IEM_MC_ARG(uint32_t *, pEFlags, 2);
12246                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12247                IEM_MC_REF_EFLAGS(pEFlags);
12248                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
12249                IEM_MC_ADVANCE_RIP_AND_FINISH();
12250                IEM_MC_END();
12251                break;
12252            }
12253
12254            case IEMMODE_32BIT:
12255            {
12256                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12257                IEM_MC_BEGIN(3, 0);
12258                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12259                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
12260                IEM_MC_ARG(uint32_t *, pEFlags, 2);
12261                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12262                IEM_MC_REF_EFLAGS(pEFlags);
12263                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
12264                /* No clearing the high dword here - test doesn't write back the result. */
12265                IEM_MC_ADVANCE_RIP_AND_FINISH();
12266                IEM_MC_END();
12267                break;
12268            }
12269
12270            case IEMMODE_64BIT:
12271            {
                /* 64-bit TEST takes a sign-extended 32-bit immediate. */
12272                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12273                IEM_MC_BEGIN(3, 0);
12274                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12275                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
12276                IEM_MC_ARG(uint32_t *, pEFlags, 2);
12277                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
12278                IEM_MC_REF_EFLAGS(pEFlags);
12279                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
12280                IEM_MC_ADVANCE_RIP_AND_FINISH();
12281                IEM_MC_END();
12282                break;
12283            }
12284
12285            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12286        }
12287    }
12288    else
12289    {
12290        /* memory access. */
12291        switch (pVCpu->iem.s.enmEffOpSize)
12292        {
12293            case IEMMODE_16BIT:
12294            {
12295                IEM_MC_BEGIN(3, 2);
12296                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
12297                IEM_MC_ARG(uint16_t, u16Src, 1);
12298                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
12299                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12300
                /* Two immediate bytes follow the ModR/M bytes, hence cbImm=2. */
12301                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
12302                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12303                IEM_MC_ASSIGN(u16Src, u16Imm);
12304                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* test only reads the destination, so the mapping is read-only. */
12305                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12306                IEM_MC_FETCH_EFLAGS(EFlags);
12307                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
12308
12309                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
12310                IEM_MC_COMMIT_EFLAGS(EFlags);
12311                IEM_MC_ADVANCE_RIP_AND_FINISH();
12312                IEM_MC_END();
12313                break;
12314            }
12315
12316            case IEMMODE_32BIT:
12317            {
12318                IEM_MC_BEGIN(3, 2);
12319                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
12320                IEM_MC_ARG(uint32_t, u32Src, 1);
12321                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
12322                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12323
12324                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12325                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12326                IEM_MC_ASSIGN(u32Src, u32Imm);
12327                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12328                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12329                IEM_MC_FETCH_EFLAGS(EFlags);
12330                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
12331
12332                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
12333                IEM_MC_COMMIT_EFLAGS(EFlags);
12334                IEM_MC_ADVANCE_RIP_AND_FINISH();
12335                IEM_MC_END();
12336                break;
12337            }
12338
12339            case IEMMODE_64BIT:
12340            {
12341                IEM_MC_BEGIN(3, 2);
12342                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
12343                IEM_MC_ARG(uint64_t, u64Src, 1);
12344                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
12345                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12346
                /* Four immediate bytes follow (sign-extended to 64 bits below). */
12347                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
12348                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12349                IEM_MC_ASSIGN(u64Src, u64Imm);
12350                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12351                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
12352                IEM_MC_FETCH_EFLAGS(EFlags);
12353                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
12354
12355                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
12356                IEM_MC_COMMIT_EFLAGS(EFlags);
12357                IEM_MC_ADVANCE_RIP_AND_FINISH();
12358                IEM_MC_END();
12359                break;
12360            }
12361
12362            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12363        }
12364    }
12365}
12366
12367
12368/** Opcode 0xf7 /2. */
/*
 * NOT Ev - the first body macro covers the register form and the non-locked
 * memory form; the second supplies the LOCK-prefixed memory tail.
 */
12369FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
12370{
12371    IEMOP_MNEMONIC(not_Ev, "not Ev");
12372    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
12373    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
12374}
12375
12376
12377/** Opcode 0xf7 /3. */
/*
 * NEG Ev - the first body macro covers the register form and the non-locked
 * memory form; the second supplies the LOCK-prefixed memory tail.
 */
12378FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
12379{
12380    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
12381    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
12382    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
12383}
12384
12385
12386/**
12387 * @opcode 0xf7
 *
 * Group 3 dispatcher for the Ev-sized forms, selecting the handler by the
 * reg field of the ModR/M byte.
12388 */
12389FNIEMOP_DEF(iemOp_Grp3_Ev)
12390{
12391    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12392    switch (IEM_GET_MODRM_REG_8(bRm))
12393    {
12394        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        /* /1 is routed to TEST as well, mirroring the 0xf6 dispatcher. */
12395        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
12396        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
12397        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
12398        case 4:
12399            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
12400            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12401            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
12402        case 5:
12403            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
12404            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12405            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
12406        case 6:
12407            IEMOP_MNEMONIC(div_Ev, "div Ev");
12408            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12409            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
12410        case 7:
12411            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
12412            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12413            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
12414        IEM_NOT_REACHED_DEFAULT_CASE_RET();
12415    }
12416}
12417
12418
12419/**
12420 * @opcode 0xf8
 *
 * CLC - clears the carry flag; no other EFLAGS bits are touched.
12421 */
12422FNIEMOP_DEF(iemOp_clc)
12423{
12424    IEMOP_MNEMONIC(clc, "clc");
12425    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12426    IEM_MC_BEGIN(0, 0);
12427    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
12428    IEM_MC_ADVANCE_RIP_AND_FINISH();
12429    IEM_MC_END();
12430}
12431
12432
12433/**
12434 * @opcode 0xf9
 *
 * STC - sets the carry flag; no other EFLAGS bits are touched.
12435 */
12436FNIEMOP_DEF(iemOp_stc)
12437{
12438    IEMOP_MNEMONIC(stc, "stc");
12439    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12440    IEM_MC_BEGIN(0, 0);
12441    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
12442    IEM_MC_ADVANCE_RIP_AND_FINISH();
12443    IEM_MC_END();
12444}
12445
12446
12447/**
12448 * @opcode 0xfa
 *
 * CLI - deferred to a C implementation since it touches RFLAGS.IF and may
 * trigger a VM exit.
12449 */
12450FNIEMOP_DEF(iemOp_cli)
12451{
12452    IEMOP_MNEMONIC(cli, "cli");
12453    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12454    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_cli);
12455}
12456
12457
/**
 * @opcode 0xfb
 *
 * STI - deferred to a C implementation since it touches RFLAGS.IF and may
 * trigger a VM exit.  (Doxygen header was missing; added to match siblings.)
 */
12458FNIEMOP_DEF(iemOp_sti)
12459{
12460    IEMOP_MNEMONIC(sti, "sti");
12461    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12462    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, iemCImpl_sti);
12463}
12464
12465
12466/**
12467 * @opcode 0xfc
 *
 * CLD - clears the direction flag; no other EFLAGS bits are touched.
12468 */
12469FNIEMOP_DEF(iemOp_cld)
12470{
12471    IEMOP_MNEMONIC(cld, "cld");
12472    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12473    IEM_MC_BEGIN(0, 0);
12474    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
12475    IEM_MC_ADVANCE_RIP_AND_FINISH();
12476    IEM_MC_END();
12477}
12478
12479
12480/**
12481 * @opcode 0xfd
 *
 * STD - sets the direction flag; no other EFLAGS bits are touched.
12482 */
12483FNIEMOP_DEF(iemOp_std)
12484{
12485    IEMOP_MNEMONIC(std, "std");
12486    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12487    IEM_MC_BEGIN(0, 0);
12488    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
12489    IEM_MC_ADVANCE_RIP_AND_FINISH();
12490    IEM_MC_END();
12491}
12492
12493
12494/**
12495 * @opmaps grp4
12496 * @opcode /0
 *
 * INC Eb - the body macro handles the register form plus the plain and
 * LOCK-prefixed memory forms.
12497 */
12498FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
12499{
12500    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
12501    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
12502}
12503
12504
12505/**
12506 * @opmaps grp4
12507 * @opcode /1
 *
 * DEC Eb - the body macro handles the register form plus the plain and
 * LOCK-prefixed memory forms.
12508 */
12509FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
12510{
12511    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
12512    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
12513}
12514
12515
12516/**
12517 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (inc) and /1 (dec) are defined; the remaining
 * reg-field values raise \#UD.
12518 */
12519FNIEMOP_DEF(iemOp_Grp4)
12520{
12521    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12522    switch (IEM_GET_MODRM_REG_8(bRm))
12523    {
12524        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
12525        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
12526        default:
12527            /** @todo is the eff-addr decoded? */
12528            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
12529            IEMOP_RAISE_INVALID_OPCODE_RET();
12530    }
12531}
12532
12533/** Opcode 0xff /0. */
/*
 * INC Ev - the first body macro covers the register form and the non-locked
 * memory form; the second supplies the LOCK-prefixed memory tail.
 */
12534FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
12535{
12536    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
12537    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
12538    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
12539}
12540
12541
12542/** Opcode 0xff /1. */
/*
 * DEC Ev - the first body macro covers the register form and the non-locked
 * memory form; the second supplies the LOCK-prefixed memory tail.
 */
12543FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
12544{
12545    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
12546    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
12547    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
12548}
12549
12550
12551/**
12552 * Opcode 0xff /2.
 *
 * CALL Ev (near, indirect): the target RIP comes from a register or memory
 * operand and the call is deferred to the per-size iemCImpl_call_NN worker.
 *
12553 * @param   bRm     The RM byte.
12554 */
12555FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
12556{
12557    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
12558    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12559
12560    if (IEM_IS_MODRM_REG_MODE(bRm))
12561    {
12562        /* The new RIP is taken from a register. */
12563        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12564        switch (pVCpu->iem.s.enmEffOpSize)
12565        {
12566            case IEMMODE_16BIT:
12567                IEM_MC_BEGIN(1, 0);
12568                IEM_MC_ARG(uint16_t, u16Target, 0);
12569                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12570                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_16, u16Target);
12571                IEM_MC_END();
12572                break;
12573
12574            case IEMMODE_32BIT:
12575                IEM_MC_BEGIN(1, 0);
12576                IEM_MC_ARG(uint32_t, u32Target, 0);
12577                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12578                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_32, u32Target);
12579                IEM_MC_END();
12580                break;
12581
12582            case IEMMODE_64BIT:
12583                IEM_MC_BEGIN(1, 0);
12584                IEM_MC_ARG(uint64_t, u64Target, 0);
12585                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12586                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_64, u64Target);
12587                IEM_MC_END();
12588                break;
12589
12590            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12591        }
12592    }
12593    else
12594    {
12595        /* The new RIP is taken from a memory location. */
12596        switch (pVCpu->iem.s.enmEffOpSize)
12597        {
12598            case IEMMODE_16BIT:
12599                IEM_MC_BEGIN(1, 1);
12600                IEM_MC_ARG(uint16_t, u16Target, 0);
12601                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12602                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12603                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12604                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12605                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_16, u16Target);
12606                IEM_MC_END();
12607                break;
12608
12609            case IEMMODE_32BIT:
12610                IEM_MC_BEGIN(1, 1);
12611                IEM_MC_ARG(uint32_t, u32Target, 0);
12612                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12613                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12614                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12615                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12616                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_32, u32Target);
12617                IEM_MC_END();
12618                break;
12619
12620            case IEMMODE_64BIT:
12621                IEM_MC_BEGIN(1, 1);
12622                IEM_MC_ARG(uint64_t, u64Target, 0);
12623                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12624                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12625                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12626                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12627                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_64, u64Target);
12628                IEM_MC_END();
12629                break;
12630
12631            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12632        }
12633    }
12634}
12635
/**
 * Body macro for the far call/jmp through a memory pointer (grp5 /3 and /5):
 * loads a sel:offset far pointer from memory and defers to a_fnCImpl.
 * Register forms raise \#UD.
 *
 * @param   a_bRm       The ModR/M byte.
 * @param   a_fnCImpl   The C implementation to defer to (callf / FarJmp).
 */
12636#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl) \
12637    /* Registers? How?? */ \
12638    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
12639    { /* likely */ } \
12640    else \
12641        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
12642    \
12643    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
12644    /** @todo what does VIA do? */ \
12645    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
12646    { /* likely */ } \
12647    else \
12648        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
12649    \
12650    /* Far pointer loaded from memory. */ \
12651    switch (pVCpu->iem.s.enmEffOpSize) \
12652    { \
12653        case IEMMODE_16BIT: \
12654            IEM_MC_BEGIN(3, 1); \
12655            IEM_MC_ARG(uint16_t, u16Sel, 0); \
12656            IEM_MC_ARG(uint16_t, offSeg, 1); \
12657            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
12658            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
12659            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
12660            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12661            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
12662            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
12663            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
12664                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
12665            IEM_MC_END(); \
12666            break; \
12667        \
12668        case IEMMODE_32BIT: \
12669            IEM_MC_BEGIN(3, 1); \
12670            IEM_MC_ARG(uint16_t, u16Sel, 0); \
12671            IEM_MC_ARG(uint32_t, offSeg, 1); \
12672            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
12673            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
12674            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
12675            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12676            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
12677            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
12678            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
12679                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
12680            IEM_MC_END(); \
12681            break; \
12682        \
12683        case IEMMODE_64BIT: \
12684            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
12685            IEM_MC_BEGIN(3, 1); \
12686            IEM_MC_ARG(uint16_t, u16Sel, 0); \
12687            IEM_MC_ARG(uint64_t, offSeg, 1); \
12688            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
12689            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
12690            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
12691            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12692            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
12693            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
12694            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE /* no gates */, \
12695                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
12696            IEM_MC_END(); \
12697            break; \
12698        \
12699        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
12700    } do {} while (0)
12701
12702
12703/**
12704 * Opcode 0xff /3.
 *
 * CALLF Ep - far call through a sel:offset pointer loaded from memory;
 * shares the body macro with jmpf (0xff /5).
 *
12705 * @param   bRm     The RM byte.
12706 */
12707FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
12708{
12709    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
12710    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf);
12711}
12712
12713
12714/**
12715 * Opcode 0xff /4.
 *
 * JMP Ev (near, indirect): the target RIP comes from a register or memory
 * operand and is installed directly via the SET_RIP microcode ops.
 *
12716 * @param   bRm     The RM byte.
12717 */
12718FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
12719{
12720    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
12721    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12722
12723    if (IEM_IS_MODRM_REG_MODE(bRm))
12724    {
12725        /* The new RIP is taken from a register. */
12726        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12727        switch (pVCpu->iem.s.enmEffOpSize)
12728        {
12729            case IEMMODE_16BIT:
12730                IEM_MC_BEGIN(0, 1);
12731                IEM_MC_LOCAL(uint16_t, u16Target);
12732                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12733                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
12734                IEM_MC_END();
12735                break;
12736
12737            case IEMMODE_32BIT:
12738                IEM_MC_BEGIN(0, 1);
12739                IEM_MC_LOCAL(uint32_t, u32Target);
12740                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12741                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
12742                IEM_MC_END();
12743                break;
12744
12745            case IEMMODE_64BIT:
12746                IEM_MC_BEGIN(0, 1);
12747                IEM_MC_LOCAL(uint64_t, u64Target);
12748                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12749                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
12750                IEM_MC_END();
12751                break;
12752
12753            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12754        }
12755    }
12756    else
12757    {
12758        /* The new RIP is taken from a memory location. */
12759        switch (pVCpu->iem.s.enmEffOpSize)
12760        {
12761            case IEMMODE_16BIT:
12762                IEM_MC_BEGIN(0, 2);
12763                IEM_MC_LOCAL(uint16_t, u16Target);
12764                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12765                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12766                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12767                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12768                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
12769                IEM_MC_END();
12770                break;
12771
12772            case IEMMODE_32BIT:
12773                IEM_MC_BEGIN(0, 2);
12774                IEM_MC_LOCAL(uint32_t, u32Target);
12775                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12776                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12777                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12778                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12779                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
12780                IEM_MC_END();
12781                break;
12782
12783            case IEMMODE_64BIT:
12784                IEM_MC_BEGIN(0, 2);
12785                IEM_MC_LOCAL(uint64_t, u64Target);
12786                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12787                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12788                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12789                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12790                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
12791                IEM_MC_END();
12792                break;
12793
12794            IEM_NOT_REACHED_DEFAULT_CASE_RET();
12795        }
12796    }
12797}
12798
12799
/**
 * Opcode 0xff /5.
 *
 * Far indirect jump (jmpf Ep): decoding and the memory fetch of the
 * selector:offset pair are done by the shared IEMOP_BODY_GRP5_FAR_EP macro
 * body, which defers to the iemCImpl_FarJmp C implementation worker.
 *
 * @param bRm The RM byte (ModR/M byte following the 0xff opcode).
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp);
}
12809
12810
/**
 * Opcode 0xff /6.
 *
 * Push general purpose register or memory operand (push Ev).  The register
 * forms are routed to the common iemOpCommonPushGReg worker; the memory
 * forms fetch the operand and push it, for 16-, 32- and 64-bit operand
 * sizes (64-bit being the default in long mode, see
 * IEMOP_HLP_DEFAULT_64BIT_OP_SIZE).
 *
 * @param bRm The RM byte (ModR/M byte following the 0xff opcode).
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12866
12867
12868/**
12869 * @opcode 0xff
12870 */
12871FNIEMOP_DEF(iemOp_Grp5)
12872{
12873 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12874 switch (IEM_GET_MODRM_REG_8(bRm))
12875 {
12876 case 0:
12877 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
12878 case 1:
12879 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
12880 case 2:
12881 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
12882 case 3:
12883 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
12884 case 4:
12885 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
12886 case 5:
12887 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
12888 case 6:
12889 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
12890 case 7:
12891 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
12892 IEMOP_RAISE_INVALID_OPCODE_RET();
12893 }
12894 AssertFailedReturn(VERR_IEM_IPE_3);
12895}
12896
12897
12898
/**
 * The one-byte opcode decoder function table, indexed directly by the opcode
 * byte (0x00..0xff).  Declared extern at the top of this file; each entry
 * points at the FNIEMOP decoder for that opcode (prefixes, escapes and group
 * dispatchers included).  Keep the entries in strict opcode order -- the
 * per-row comments give the opcode of the first entry on the row.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
12966
12967
12968/** @} */
12969
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette