VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 99051

Last change on this file since 99051 was 98975, checked in by vboxsync, 21 months ago

VMM/IEM: More work on processing MC blocks, mainly related to reworking common functions for unary operations into body macros. bugref:10369

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 434.0 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 98975 2023-03-15 10:05:22Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
62 *
63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * @param  a_fnNormalU8  Assembly-level helper invoked as (pu8Dst, u8Src, pEFlags)
 *                       for the non-locked register and memory forms.
 * @param  a_fRW         IEM_ACCESS_XXX constant used when mapping and committing
 *                       the memory destination (RW for read-modify-write ops).
 *
 * @note   Deliberately leaves two scopes open (the memory `else` and the LOCK
 *         `else`); one of the two tail macros above must follow to close them —
 *         the trailing (void)0 swallows the caller's semicolon.
64 */
65#define IEMOP_BODY_BINARY_rm_r8(a_fnNormalU8, a_fRW) \
66 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
67 \
68 /* \
69 * If rm is denoting a register, no more instruction bytes. \
70 */ \
71 if (IEM_IS_MODRM_REG_MODE(bRm)) \
72 { \
73 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
74 \
75 IEM_MC_BEGIN(3, 0); \
76 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
77 IEM_MC_ARG(uint8_t, u8Src, 1); \
78 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
79 \
80 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
81 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
82 IEM_MC_REF_EFLAGS(pEFlags); \
83 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
84 \
85 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
86 IEM_MC_END(); \
87 } \
88 else \
89 { \
90 /* \
91 * We're accessing memory. \
92 * Note! We're putting the eflags on the stack here so we can commit them \
93 * after the memory. \
94 */ \
95 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
96 { \
97 IEM_MC_BEGIN(3, 2); \
98 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
99 IEM_MC_ARG(uint8_t, u8Src, 1); \
100 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
102 \
103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
104 IEMOP_HLP_DONE_DECODING(); \
105 IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
106 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
107 IEM_MC_FETCH_EFLAGS(EFlags); \
108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
109 \
110 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
111 IEM_MC_COMMIT_EFLAGS(EFlags); \
112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
113 IEM_MC_END(); \
114 } \
115 else \
116 { \
117 (void)0
118
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8 when the instruction does not permit a LOCK
 * prefix: raises the invalid-lock-prefix exception and closes the two scopes
 * left open by the body macro.
 */
119#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
120 IEMOP_HLP_DONE_DECODING(); \
121 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
122 } \
123 } \
124 (void)0
125
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8 when the instruction permits a LOCK prefix:
 * emits the locked read-modify-write memory form (always IEM_ACCESS_DATA_RW)
 * and closes the two scopes left open by the body macro.
 *
 * @param  a_fnLockedU8  Locked assembly helper invoked as (pu8Dst, u8Src, pEFlags).
 */
126#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
127 IEM_MC_BEGIN(3, 2); \
128 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
129 IEM_MC_ARG(uint8_t, u8Src, 1); \
130 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
131 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
132 \
133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
134 IEMOP_HLP_DONE_DECODING(); \
135 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
136 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
137 IEM_MC_FETCH_EFLAGS(EFlags); \
138 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
139 \
140 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
141 IEM_MC_COMMIT_EFLAGS(EFlags); \
142 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
143 IEM_MC_END(); \
144 } \
145 } \
146 (void)0
147
148/**
149 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
150 * destination.
 *
 * @param  a_fnNormalU8  Assembly helper invoked as (pu8Dst, u8Src, pEFlags);
 *                       destination is always the Gb register, so no locked
 *                       variant or tail macro is needed (LOCK raises \#UD via
 *                       the no-lock decode helper).
151 */
152#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
154 \
155 /* \
156 * If rm is denoting a register, no more instruction bytes. \
157 */ \
158 if (IEM_IS_MODRM_REG_MODE(bRm)) \
159 { \
160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
161 IEM_MC_BEGIN(3, 0); \
162 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
163 IEM_MC_ARG(uint8_t, u8Src, 1); \
164 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
165 \
166 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
167 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
168 IEM_MC_REF_EFLAGS(pEFlags); \
169 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
170 \
171 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
172 IEM_MC_END(); \
173 } \
174 else \
175 { \
176 /* \
177 * We're accessing memory. \
178 */ \
179 IEM_MC_BEGIN(3, 1); \
180 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
181 IEM_MC_ARG(uint8_t, u8Src, 1); \
182 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
183 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
184 \
185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
187 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
188 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
189 IEM_MC_REF_EFLAGS(pEFlags); \
190 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
191 \
192 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
193 IEM_MC_END(); \
194 } \
195 (void)0
196
197
198/**
199 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
200 * memory/register as the destination.
 *
 * @param  a_fnNormalU16/32/64  Assembly helpers for each effective operand size,
 *                              each invoked as (puDst, uSrc, pEFlags).
 * @param  a_fRW                IEM_ACCESS_XXX constant for mapping/committing the
 *                              memory destination; also used to detect TEST/CMP
 *                              (read-only) so the high-dword clearing of the
 *                              32-bit register destination is skipped for them.
 *
 * @note   Like IEMOP_BODY_BINARY_rm_r8, leaves two scopes open; must be followed
 *         by IEMOP_BODY_BINARY_rm_rv_NO_LOCK or IEMOP_BODY_BINARY_rm_rv_LOCKED.
201 */
202#define IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
203 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
204 \
205 /* \
206 * If rm is denoting a register, no more instruction bytes. \
207 */ \
208 if (IEM_IS_MODRM_REG_MODE(bRm)) \
209 { \
210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
211 switch (pVCpu->iem.s.enmEffOpSize) \
212 { \
213 case IEMMODE_16BIT: \
214 IEM_MC_BEGIN(3, 0); \
215 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
216 IEM_MC_ARG(uint16_t, u16Src, 1); \
217 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
218 \
219 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
220 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
221 IEM_MC_REF_EFLAGS(pEFlags); \
222 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
223 \
224 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
225 IEM_MC_END(); \
226 break; \
227 \
228 case IEMMODE_32BIT: \
229 IEM_MC_BEGIN(3, 0); \
230 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
231 IEM_MC_ARG(uint32_t, u32Src, 1); \
232 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
233 \
234 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
235 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
236 IEM_MC_REF_EFLAGS(pEFlags); \
237 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
238 \
239 if ((a_fRW) == IEM_ACCESS_DATA_RW) /* not TEST and CMP */ \
240 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
241 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
242 IEM_MC_END(); \
243 break; \
244 \
245 case IEMMODE_64BIT: \
246 IEM_MC_BEGIN(3, 0); \
247 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
248 IEM_MC_ARG(uint64_t, u64Src, 1); \
249 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
250 \
251 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
252 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
253 IEM_MC_REF_EFLAGS(pEFlags); \
254 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
255 \
256 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
257 IEM_MC_END(); \
258 break; \
259 \
260 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
261 } \
262 } \
263 else \
264 { \
265 /* \
266 * We're accessing memory. \
267 * Note! We're putting the eflags on the stack here so we can commit them \
268 * after the memory. \
269 */ \
270 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
271 { \
272 switch (pVCpu->iem.s.enmEffOpSize) \
273 { \
274 case IEMMODE_16BIT: \
275 IEM_MC_BEGIN(3, 2); \
276 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
277 IEM_MC_ARG(uint16_t, u16Src, 1); \
278 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
279 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
280 \
281 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
282 IEMOP_HLP_DONE_DECODING(); \
283 IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
284 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
285 IEM_MC_FETCH_EFLAGS(EFlags); \
286 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
287 \
288 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
289 IEM_MC_COMMIT_EFLAGS(EFlags); \
290 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
291 IEM_MC_END(); \
292 break; \
293 \
294 case IEMMODE_32BIT: \
295 IEM_MC_BEGIN(3, 2); \
296 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
297 IEM_MC_ARG(uint32_t, u32Src, 1); \
298 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
299 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
300 \
301 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
302 IEMOP_HLP_DONE_DECODING(); \
303 IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
304 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
305 IEM_MC_FETCH_EFLAGS(EFlags); \
306 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
307 \
308 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
309 IEM_MC_COMMIT_EFLAGS(EFlags); \
310 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
311 IEM_MC_END(); \
312 break; \
313 \
314 case IEMMODE_64BIT: \
315 IEM_MC_BEGIN(3, 2); \
316 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
317 IEM_MC_ARG(uint64_t, u64Src, 1); \
318 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
319 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
320 \
321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
322 IEMOP_HLP_DONE_DECODING(); \
323 IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
324 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
325 IEM_MC_FETCH_EFLAGS(EFlags); \
326 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
327 \
328 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
329 IEM_MC_COMMIT_EFLAGS(EFlags); \
330 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
331 IEM_MC_END(); \
332 break; \
333 \
334 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
335 } \
336 } \
337 else \
338 { \
339 (void)0
340
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv when the instruction does not permit a LOCK
 * prefix: raises the invalid-lock-prefix exception and closes the two scopes
 * left open by the body macro.
 */
341#define IEMOP_BODY_BINARY_rm_rv_NO_LOCK() \
342 IEMOP_HLP_DONE_DECODING(); \
343 return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
344 } \
345 } \
346 (void)0
347
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv when the instruction permits a LOCK prefix:
 * emits the locked read-modify-write memory forms for all three operand sizes
 * (always IEM_ACCESS_DATA_RW) and closes the scopes left open by the body macro.
 *
 * @param  a_fnLockedU16/32/64  Locked assembly helpers, each invoked as
 *                              (puDst, uSrc, pEFlags).
 */
348#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
349 switch (pVCpu->iem.s.enmEffOpSize) \
350 { \
351 case IEMMODE_16BIT: \
352 IEM_MC_BEGIN(3, 2); \
353 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
354 IEM_MC_ARG(uint16_t, u16Src, 1); \
355 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
356 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
357 \
358 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
359 IEMOP_HLP_DONE_DECODING(); \
360 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
361 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
362 IEM_MC_FETCH_EFLAGS(EFlags); \
363 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
364 \
365 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
366 IEM_MC_COMMIT_EFLAGS(EFlags); \
367 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
368 IEM_MC_END(); \
369 break; \
370 \
371 case IEMMODE_32BIT: \
372 IEM_MC_BEGIN(3, 2); \
373 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
374 IEM_MC_ARG(uint32_t, u32Src, 1); \
375 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
376 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
377 \
378 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
379 IEMOP_HLP_DONE_DECODING(); \
380 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
381 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
382 IEM_MC_FETCH_EFLAGS(EFlags); \
383 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
384 \
385 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
386 IEM_MC_COMMIT_EFLAGS(EFlags); \
387 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
388 IEM_MC_END(); \
389 break; \
390 \
391 case IEMMODE_64BIT: \
392 IEM_MC_BEGIN(3, 2); \
393 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
394 IEM_MC_ARG(uint64_t, u64Src, 1); \
395 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
396 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
397 \
398 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
399 IEMOP_HLP_DONE_DECODING(); \
400 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
401 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
402 IEM_MC_FETCH_EFLAGS(EFlags); \
403 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
404 \
405 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
406 IEM_MC_COMMIT_EFLAGS(EFlags); \
407 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
408 IEM_MC_END(); \
409 break; \
410 \
411 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
412 } \
413 } \
414 } \
415 (void)0
416
417
418/**
419 * Body for instructions like ADD, AND, OR, ++ with working on AL with
420 * a byte immediate.
 *
 * @param  a_fnNormalU8  Assembly helper invoked as (pu8Dst, u8Src, pEFlags);
 *                       destination is always AL, source the Ib immediate.
 * @note   No trailing semicolon after IEM_MC_END() — the caller supplies it.
421 */
422#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
423 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
425 \
426 IEM_MC_BEGIN(3, 0); \
427 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
428 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
429 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
430 \
431 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
432 IEM_MC_REF_EFLAGS(pEFlags); \
433 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
434 \
435 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
436 IEM_MC_END()
437
438/**
439 * Body for instructions like ADD, AND, OR, ++ with working on
440 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param  a_fnNormalU16/32/64   Assembly helpers for each operand size, each
 *                               invoked as (puDst, uSrc, pEFlags).  The 64-bit
 *                               form takes the Iz immediate sign-extended from
 *                               32 bits, per the x86 encoding.
 * @param  a_fModifiesDstReg     Non-zero when the op writes rAX (zero for
 *                               CMP/TEST); gates clearing the high dword of RAX
 *                               after a 32-bit operation.
441 */
442#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
443 switch (pVCpu->iem.s.enmEffOpSize) \
444 { \
445 case IEMMODE_16BIT: \
446 { \
447 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
449 \
450 IEM_MC_BEGIN(3, 0); \
451 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
452 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
453 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
454 \
455 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
456 IEM_MC_REF_EFLAGS(pEFlags); \
457 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
458 \
459 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
460 IEM_MC_END(); \
461 } \
462 \
463 case IEMMODE_32BIT: \
464 { \
465 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
467 \
468 IEM_MC_BEGIN(3, 0); \
469 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
470 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
471 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
472 \
473 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
474 IEM_MC_REF_EFLAGS(pEFlags); \
475 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
476 \
477 if (a_fModifiesDstReg) \
478 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
479 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
480 IEM_MC_END(); \
481 } \
482 \
483 case IEMMODE_64BIT: \
484 { \
485 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
487 \
488 IEM_MC_BEGIN(3, 0); \
489 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
490 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
491 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
492 \
493 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
494 IEM_MC_REF_EFLAGS(pEFlags); \
495 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
496 \
497 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
498 IEM_MC_END(); \
499 } \
500 \
501 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
502 } \
503 (void)0
504
505
506
507/* Instruction specification format - work in progress: */
508
509/**
510 * @opcode 0x00
511 * @opmnemonic add
512 * @op1 rm:Eb
513 * @op2 reg:Gb
514 * @opmaps one
515 * @openc ModR/M
516 * @opflmodify cf,pf,af,zf,sf,of
517 * @ophints harmless ignores_op_sizes
518 * @opstats add_Eb_Gb
519 * @opgroup og_gen_arith_bin
520 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
521 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
522 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
523 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
524 */
525FNIEMOP_DEF(iemOp_add_Eb_Gb)
526{
527 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* Shared body emits register + plain memory forms; the _LOCKED tail adds the LOCK-prefixed memory form. */
528 IEMOP_BODY_BINARY_rm_r8( iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
529 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
530}
531
532
533/**
534 * @opcode 0x01
535 * @opgroup og_gen_arith_bin
536 * @opflmodify cf,pf,af,zf,sf,of
537 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
538 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
539 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
540 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
541 */
542FNIEMOP_DEF(iemOp_add_Ev_Gv)
543{
544 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* Shared body emits register + plain memory forms; the _LOCKED tail adds the LOCK-prefixed memory forms. */
545 IEMOP_BODY_BINARY_rm_rv( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
546 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
547}
548
549
550/**
551 * @opcode 0x02
552 * @opgroup og_gen_arith_bin
553 * @opflmodify cf,pf,af,zf,sf,of
554 * @opcopytests iemOp_add_Eb_Gb
555 */
556FNIEMOP_DEF(iemOp_add_Gb_Eb)
557{
558 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* Register destination: no locked variant exists for the Gb,Eb direction. */
559 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
560}
561
562
563/**
564 * @opcode 0x03
565 * @opgroup og_gen_arith_bin
566 * @opflmodify cf,pf,af,zf,sf,of
567 * @opcopytests iemOp_add_Ev_Gv
568 */
569FNIEMOP_DEF(iemOp_add_Gv_Ev)
570{
571 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* Final arg: 1 = modifies the destination register (clears high dword in 32-bit mode). */
572 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
573}
574
575
576/**
577 * @opcode 0x04
578 * @opgroup og_gen_arith_bin
579 * @opflmodify cf,pf,af,zf,sf,of
580 * @opcopytests iemOp_add_Eb_Gb
581 */
582FNIEMOP_DEF(iemOp_add_Al_Ib)
583{
584 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* AL += Ib; the body macro supplies its own trailing semicolon via this statement's. */
585 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
586}
587
588
589/**
590 * @opcode 0x05
591 * @opgroup og_gen_arith_bin
592 * @opflmodify cf,pf,af,zf,sf,of
593 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
594 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
595 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
596 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
597 */
598FNIEMOP_DEF(iemOp_add_eAX_Iz)
599{
600 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
 /* Final arg: 1 = modifies rAX (clears high dword after 32-bit op). */
601 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
602}
603
604
605/**
606 * @opcode 0x06
607 * @opgroup og_stack_sreg
608 */
609FNIEMOP_DEF(iemOp_push_ES)
610{
611 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
 /* Invalid in 64-bit mode (raises #UD); common helper does the actual push. */
612 IEMOP_HLP_NO_64BIT();
613 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
614}
615
616
617/**
618 * @opcode 0x07
619 * @opgroup og_stack_sreg
620 */
621FNIEMOP_DEF(iemOp_pop_ES)
622{
623 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
 /* Invalid in 64-bit mode; segment loads are complex, so defer to the C implementation. */
624 IEMOP_HLP_NO_64BIT();
625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
626 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
627}
628
629
630/**
631 * @opcode 0x08
632 * @opgroup og_gen_arith_bin
633 * @opflmodify cf,pf,af,zf,sf,of
634 * @opflundef af
635 * @opflclear of,cf
636 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
637 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
638 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
639 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
640 */
641FNIEMOP_DEF(iemOp_or_Eb_Gb)
642{
643 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined after OR; tell the verifier not to compare it. */
644 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
645 IEMOP_BODY_BINARY_rm_r8( iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
646 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
647}
648
649
650/**
651 * @opcode 0x09
652 * @opgroup og_gen_arith_bin
653 * @opflmodify cf,pf,af,zf,sf,of
654 * @opflundef af
655 * @opflclear of,cf
656 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
657 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
658 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
659 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
660 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
661 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
662 */
663FNIEMOP_DEF(iemOp_or_Ev_Gv)
664{
665 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined after OR. */
666 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
667 IEMOP_BODY_BINARY_rm_rv( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
668 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
669}
670
671
672/**
673 * @opcode 0x0a
674 * @opgroup og_gen_arith_bin
675 * @opflmodify cf,pf,af,zf,sf,of
676 * @opflundef af
677 * @opflclear of,cf
678 * @opcopytests iemOp_or_Eb_Gb
679 */
680FNIEMOP_DEF(iemOp_or_Gb_Eb)
681{
682 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* AF is architecturally undefined after OR. */
683 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
684 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
685}
686
687
688/**
689 * @opcode 0x0b
690 * @opgroup og_gen_arith_bin
691 * @opflmodify cf,pf,af,zf,sf,of
692 * @opflundef af
693 * @opflclear of,cf
694 * @opcopytests iemOp_or_Ev_Gv
695 */
696FNIEMOP_DEF(iemOp_or_Gv_Ev)
697{
698 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* AF is architecturally undefined after OR. */
699 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
700 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
701}
702
703
704/**
705 * @opcode 0x0c
706 * @opgroup og_gen_arith_bin
707 * @opflmodify cf,pf,af,zf,sf,of
708 * @opflundef af
709 * @opflclear of,cf
710 * @opcopytests iemOp_or_Eb_Gb
711 */
712FNIEMOP_DEF(iemOp_or_Al_Ib)
713{
714 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* AF is architecturally undefined after OR. */
715 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
716 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
717}
718
719
720/**
721 * @opcode 0x0d
722 * @opgroup og_gen_arith_bin
723 * @opflmodify cf,pf,af,zf,sf,of
724 * @opflundef af
725 * @opflclear of,cf
726 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
727 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
728 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
729 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
730 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
731 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
732 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
733 */
734FNIEMOP_DEF(iemOp_or_eAX_Iz)
735{
736 IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
 /* AF is architecturally undefined after OR. */
737 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
738 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
739}
740
741
742/**
743 * @opcode 0x0e
744 * @opgroup og_stack_sreg
745 */
746FNIEMOP_DEF(iemOp_push_CS)
747{
748 IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
 /* Only reachable on pre-64-bit modes; 0x0e is the two-byte escape prefix slot otherwise not — see iemOp_2byteEscape for 0x0f. */
749 IEMOP_HLP_NO_64BIT();
750 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
751}
752
753
754/**
755 * @opcode 0x0f
756 * @opmnemonic EscTwo0f
757 * @openc two0f
758 * @opdisenum OP_2B_ESC
759 * @ophints harmless
760 * @opgroup og_escapes
 *
 * Two-byte opcode escape (286+).  On the 8086/80186 this byte instead decodes
 * as POP CS, which is emulated via the generic segment-pop C implementation.
761 */
762FNIEMOP_DEF(iemOp_2byteEscape)
763{
764#ifdef VBOX_STRICT
765 /* Sanity check the table the first time around. */
766 static bool s_fTested = false;
767 if (RT_LIKELY(s_fTested)) { /* likely */ }
768 else
769 {
770 s_fTested = true;
771 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
772 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
773 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
774 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
775 }
776#endif
777
778 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
779 {
780 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
781 IEMOP_HLP_MIN_286();
782 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
783 }
784 /* @opdone */
785
786 /*
787 * On the 8086 this is a POP CS instruction.
788 * For the time being we don't specify this.
789 */
790 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
791 IEMOP_HLP_NO_64BIT();
792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 /* Fix: this is POP CS, so the register popped must be CS, not ES. */
793 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
794}
795
796/**
797 * @opcode 0x10
798 * @opgroup og_gen_arith_bin
799 * @opfltest cf
800 * @opflmodify cf,pf,af,zf,sf,of
801 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
802 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
803 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
804 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
805 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
806 */
807FNIEMOP_DEF(iemOp_adc_Eb_Gb)
808{
809 IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* Shared body emits register + plain memory forms; the _LOCKED tail adds the LOCK-prefixed memory form. */
810 IEMOP_BODY_BINARY_rm_r8( iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
811 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
812}
813
814
815/**
816 * @opcode 0x11
817 * @opgroup og_gen_arith_bin
818 * @opfltest cf
819 * @opflmodify cf,pf,af,zf,sf,of
820 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
821 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
822 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
823 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
824 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
825 */
826FNIEMOP_DEF(iemOp_adc_Ev_Gv)
827{
828 IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* Shared body emits register + plain memory forms; the _LOCKED tail adds the LOCK-prefixed memory forms. */
829 IEMOP_BODY_BINARY_rm_rv( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
830 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
831}
832
833
834/**
835 * @opcode 0x12
836 * @opgroup og_gen_arith_bin
837 * @opfltest cf
838 * @opflmodify cf,pf,af,zf,sf,of
839 * @opcopytests iemOp_adc_Eb_Gb
840 */
841FNIEMOP_DEF(iemOp_adc_Gb_Eb)
842{
843 IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* Register destination: no locked variant exists for the Gb,Eb direction. */
844 IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
845}
846
847
848/**
849 * @opcode 0x13
850 * @opgroup og_gen_arith_bin
851 * @opfltest cf
852 * @opflmodify cf,pf,af,zf,sf,of
853 * @opcopytests iemOp_adc_Ev_Gv
854 */
855FNIEMOP_DEF(iemOp_adc_Gv_Ev)
856{
857 IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* Final arg: 1 = modifies the destination register (clears high dword in 32-bit mode). */
858 IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
859}
860
861
862/**
863 * @opcode 0x14
864 * @opgroup og_gen_arith_bin
865 * @opfltest cf
866 * @opflmodify cf,pf,af,zf,sf,of
867 * @opcopytests iemOp_adc_Eb_Gb
868 */
869FNIEMOP_DEF(iemOp_adc_Al_Ib)
870{
871 IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* AL = AL + Ib + CF. */
872 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
873}
874
875
876/**
877 * @opcode 0x15
878 * @opgroup og_gen_arith_bin
879 * @opfltest cf
880 * @opflmodify cf,pf,af,zf,sf,of
881 * @opcopytests iemOp_adc_Ev_Gv
882 */
883FNIEMOP_DEF(iemOp_adc_eAX_Iz)
884{
885 IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
 /* Final arg: 1 = modifies rAX (clears high dword after 32-bit op). */
886 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
887}
888
889
890/**
891 * @opcode 0x16
 * @opgroup og_stack_sreg
892 */
893FNIEMOP_DEF(iemOp_push_SS)
894{
895 IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
 /* Invalid in 64-bit mode; common helper does the actual push. */
896 IEMOP_HLP_NO_64BIT();
897 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
898}
899
900
901/**
902 * @opcode 0x17
903 * @opgroup og_stack_sreg
 * @note   NOTE(review): previous annotations claimed og_gen_arith_bin with
 *         cf/pf/af/zf/sf/of flag effects, apparently copy-pasted from SBB;
 *         the body only defers to iemCImpl_pop_Sreg, like pop ES/DS, which
 *         does not involve the arithmetic flags.
906 */
907FNIEMOP_DEF(iemOp_pop_SS)
908{
909 IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
 /* POP SS inhibits interrupts for one instruction (DISOPTYPE_INHIBIT_IRQS); invalid in 64-bit mode. */
910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
911 IEMOP_HLP_NO_64BIT();
912 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
913}
914
915
916/**
917 * @opcode 0x18
918 * @opgroup og_gen_arith_bin
919 * @opfltest cf
920 * @opflmodify cf,pf,af,zf,sf,of
921 */
922FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
923{
924 IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
 /* Shared body emits register + plain memory forms; the _LOCKED tail adds the LOCK-prefixed memory form. */
925 IEMOP_BODY_BINARY_rm_r8( iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
926 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
927}
928
929
930/**
931 * @opcode 0x19
932 * @opgroup og_gen_arith_bin
933 * @opfltest cf
934 * @opflmodify cf,pf,af,zf,sf,of
935 */
936FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
937{
938 IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
 /* Shared body emits register + plain memory forms; the _LOCKED tail adds the LOCK-prefixed memory forms. */
939 IEMOP_BODY_BINARY_rm_rv( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
940 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
941}
942
943
944/**
945 * @opcode 0x1a
946 * @opgroup og_gen_arith_bin
947 * @opfltest cf
948 * @opflmodify cf,pf,af,zf,sf,of
949 */
950FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
951{
952 IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* Register destination: no locked variant exists for the Gb,Eb direction. */
953 IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
954}
955
956
957/**
958 * @opcode 0x1b
959 * @opgroup og_gen_arith_bin
960 * @opfltest cf
961 * @opflmodify cf,pf,af,zf,sf,of
962 */
963FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
964{
965 IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
 /* Final arg: 1 = modifies the destination register (clears high dword in 32-bit mode). */
966 IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
967}
968
969
970/**
971 * @opcode 0x1c
972 * @opgroup og_gen_arith_bin
973 * @opfltest cf
974 * @opflmodify cf,pf,af,zf,sf,of
975 */
976FNIEMOP_DEF(iemOp_sbb_Al_Ib)
977{
978 IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
 /* AL = AL - Ib - CF. */
979 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
980}
981
982
983/**
984 * @opcode 0x1d
985 * @opgroup og_gen_arith_bin
986 * @opfltest cf
987 * @opflmodify cf,pf,af,zf,sf,of
988 */
989FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
990{
991 IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
 /* Final arg: 1 = modifies rAX (clears high dword after 32-bit op). */
992 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
993}
994
995
/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode (IEMOP_HLP_NO_64BIT raises #UD there). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1006
1007
/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; deferred to the C implementation since
       loading a segment register has side effects beyond simple decoding. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1019
1020
/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8 - AF is architecturally undefined for AND, hence the
       verification-mode exemption below. LOCKED variant handles LOCK prefix. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1035
1036
/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64 - AF undefined for AND; LOCK form supported. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1051
1052
/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8 - register destination, no LOCK form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1066
1067
/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64 - register destination, no LOCK form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1081
1082
1083/**
1084 * @opcode 0x24
1085 * @opgroup og_gen_arith_bin
1086 * @opflmodify cf,pf,af,zf,sf,of
1087 * @opflundef af
1088 * @opflclear of,cf
1089 */
1090FNIEMOP_DEF(iemOp_and_Al_Ib)
1091{
1092 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1093 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1094 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1095}
1096
1097
/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32 - accumulator form, worker chosen by operand size. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1111
1112
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode the next opcode
       byte through the one-byte dispatch table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1131
1132
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       Deferred to the C implementation. OF is architecturally undefined. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
1147
1148
/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8 - plain form plus LOCK-prefixed memory-destination form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1160
1161
/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64 - worker selected by effective operand size. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1173
1174
/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8 - register destination, no LOCK form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1185
1186
/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64 - register destination, no LOCK form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1197
1198
/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8 - fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1209
1210
/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32 - accumulator form, worker chosen by operand size. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1221
1222
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1241
1242
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
       Deferred to the C implementation. OF is architecturally undefined. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
1257
1258
/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8 - AF undefined for XOR; LOCK form supported. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8(       iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1273
1274
/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64 - AF undefined for XOR; LOCK form supported. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv(       iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1289
1290
/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8 - register destination, no LOCK form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1304
1305
/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64 - register destination, no LOCK form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
}
1319
1320
/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8 - fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1334
1335
1336/**
1337 * @opcode 0x35
1338 * @opgroup og_gen_arith_bin
1339 * @opflmodify cf,pf,af,zf,sf,of
1340 * @opflundef af
1341 * @opflclear of,cf
1342 */
1343FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1344{
1345 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1346 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1347 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1348}
1349
1350
/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1369
1370
/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode.
       Deferred to the C implementation; the @optest lines above capture
       the Intel/AMD behavioral differences for the undefined flags. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
1418
1419
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8 - read-only access (IEM_ACCESS_DATA_R); LOCK is invalid for CMP. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1429
1430
/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64 - read-only access; LOCK is invalid for CMP. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
1440
1441
/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8 - register first operand. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1450
1451
/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64.
       NOTE(review): trailing 0 vs the 1 passed by sbb/and/sub/xor presumably
       disables the result write-back since CMP only sets flags - confirm
       against the IEMOP_BODY_BINARY_rv_rm macro. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1460
1461
/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8 - fixed 8-bit accumulator form. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1470
1471
/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32 - trailing 0: CMP does not write the destination. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1480
1481
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1494
1495
1496/**
1497 * @opcode 0x3f
1498 * @opfltest af,cf
1499 * @opflmodify cf,pf,af,zf,sf,of
1500 * @opflundef pf,zf,sf,of
1501 * @opgroup og_gen_arith_dec
1502 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1503 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1504 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1505 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1506 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1507 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1508 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1509 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1510 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1511 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1512 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1513 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1514 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1515 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1516 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1517 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1518 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1519 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1520 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1521 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1522 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1523 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1524 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1525 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1526 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1527 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1528 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1529 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1530 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1531 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1532 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1533 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1534 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1535 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1536 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1537 */
1538FNIEMOP_DEF(iemOp_aas)
1539{
1540 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1541 IEMOP_HLP_NO_64BIT();
1542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1543 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1544
1545 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1546}
1547
1548
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the effective operand size and calls the matching
 * pfnNormalU16/U32/U64 worker with a reference to the register and EFLAGS.
 *
 * @param   pImpl   The unary operation implementation table; only the
 *                  non-locked (register) workers are used here.
 * @param   iReg    Index of the general-purpose register to operate on.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* A 32-bit write zeroes the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1594
1595
/**
 * @opcode 0x40
 * @note Doubles as the plain REX prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1616
1617
/**
 * @opcode 0x41
 * @note Doubles as the REX.B prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB      = 1 << 3; /* stored pre-shifted, OR'ed into reg numbers */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1639
1640
/**
 * @opcode 0x42
 * @note Doubles as the REX.X prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex  = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1662
1663
1664
/**
 * @opcode 0x43
 * @note Doubles as the REX.BX prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB      = 1 << 3;
        pVCpu->iem.s.uRexIndex  = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1687
1688
/**
 * @opcode 0x44
 * @note Doubles as the REX.R prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg    = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1710
1711
/**
 * @opcode 0x45
 * @note Doubles as the REX.RB prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg    = 1 << 3;
        pVCpu->iem.s.uRexB      = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1734
1735
/**
 * @opcode 0x46
 * @note Doubles as the REX.RX prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg    = 1 << 3;
        pVCpu->iem.s.uRexIndex  = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1758
1759
/**
 * @opcode 0x47
 * @note Doubles as the REX.RBX prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg    = 1 << 3;
        pVCpu->iem.s.uRexB      = 1 << 3;
        pVCpu->iem.s.uRexIndex  = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1783
1784
/**
 * @opcode 0x48
 * @note Doubles as the REX.W prefix in 64-bit mode (recalculates op size).
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1806
1807
/**
 * @opcode 0x49
 * @note Doubles as the REX.BW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB      = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1830
1831
/**
 * @opcode 0x4a
 * @note Doubles as the REX.XW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex  = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1854
1855
/**
 * @opcode 0x4b
 * @note Doubles as the REX.BXW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB      = 1 << 3;
        pVCpu->iem.s.uRexIndex  = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1879
1880
/**
 * @opcode 0x4c
 * @note Doubles as the REX.RW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1903
1904
/**
 * @opcode 0x4d
 * @note Doubles as the REX.RBW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;
        pVCpu->iem.s.uRexB      = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1928
1929
/**
 * @opcode 0x4e
 * @note Doubles as the REX.RXW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;
        pVCpu->iem.s.uRexIndex  = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1953
1954
/**
 * @opcode 0x4f
 * @note Doubles as the REX.RBXW prefix in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg    = 1 << 3;
        pVCpu->iem.s.uRexB      = 1 << 3;
        pVCpu->iem.s.uRexIndex  = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1979
1980
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B, the default
 * operand size becomes 64-bit, and a 66h prefix selects 16-bit instead
 * (32-bit pushes do not exist in 64-bit mode).
 *
 * @param   iReg    Index of the general-purpose register to push.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2026
2027
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    /* PUSH rAX - common push helper does the operand-size dispatch. */
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2036
2037
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    /* PUSH rCX. */
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2046
2047
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    /* PUSH rDX. */
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2056
2057
/**
 * @opcode 0x53
 *
 * PUSH rBX (PUSH r11 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2066
2067
/**
 * @opcode 0x54
 *
 * PUSH rSP.  The 8086 target is special-cased: it pushes the value SP has
 * after the decrement (SP-2), which is what the code below implements by
 * subtracting 2 from the fetched value before pushing.  Later CPUs push the
 * pre-decrement value and go through the common helper.
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2); /* 8086 pushes the decremented SP. */
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2086
2087
/**
 * @opcode 0x55
 *
 * PUSH rBP (PUSH r13 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2096
2097
/**
 * @opcode 0x56
 *
 * PUSH rSI (PUSH r14 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2106
2107
/**
 * @opcode 0x57
 *
 * PUSH rDI (PUSH r15 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2116
2117
/**
 * Common 'pop register' helper.
 *
 * Pops the top of stack into general register @a iReg using the current
 * effective operand size.  In 64-bit mode the register index is extended
 * with REX.B and the default operand size becomes 64-bit (66h selects
 * 16-bit).  Note that the 32-bit case explicitly clears the high half of
 * the 64-bit register, mirroring regular 32-bit GPR write behavior.
 *
 * @param   iReg    The general register to pop into (X86_GREG_XXX), before
 *                  any REX.B extension.  Callers must not route xSP here in
 *                  the non-REX case (see iemOp_pop_eSP).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B extends the register index to r8..r15. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2164
2165
/**
 * @opcode 0x58
 *
 * POP rAX (POP r8 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2174
2175
/**
 * @opcode 0x59
 *
 * POP rCX (POP r9 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2184
2185
/**
 * @opcode 0x5a
 *
 * POP rDX (POP r10 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2194
2195
/**
 * @opcode 0x5b
 *
 * POP rBX (POP r11 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2204
2205
/**
 * @opcode 0x5c
 *
 * POP rSP.  This needs special handling because the destination is the
 * stack pointer itself: the value is popped into a local first and then
 * stored to xSP, rather than popping through a register reference like the
 * common helper does.  With REX.B the instruction addresses r12 instead and
 * is routed to the common helper.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP); /* POP r12 - no special casing needed. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2255
2256
/**
 * @opcode 0x5d
 *
 * POP rBP (POP r13 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2265
2266
/**
 * @opcode 0x5e
 *
 * POP rSI (POP r14 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2275
2276
/**
 * @opcode 0x5f
 *
 * POP rDI (POP r15 with REX.B; extension applied by the common helper).
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2285
2286
/**
 * @opcode 0x60
 *
 * PUSHA/PUSHAD - push all general registers.  186+ and invalid in 64-bit
 * mode (where 0x60 is not this instruction); deferred to a C implementation
 * selected by the effective operand size.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
2300
2301
/**
 * @opcode 0x61
 *
 * POPA/POPAD outside 64-bit mode - pop all general registers, deferred to a
 * C implementation selected by the effective operand size.
 *
 * In 64-bit mode this byte is instead the MVEX prefix (Knights Corner),
 * which is not supported and raises \#UD.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
2321
2322
2323/**
2324 * @opcode 0x62
2325 * @opmnemonic bound
2326 * @op1 Gv_RO
2327 * @op2 Ma
2328 * @opmincpu 80186
2329 * @ophints harmless invalid_64
2330 * @optest op1=0 op2=0 ->
2331 * @optest op1=1 op2=0 -> value.xcpt=5
2332 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2333 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2334 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2335 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2336 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2337 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2338 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2339 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2340 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2341 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2342 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2343 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2344 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2345 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2346 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2347 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2348 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2349 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2350 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2351 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2352 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2353 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2354 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2355 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2356 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2357 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2358 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2359 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2360 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2361 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2362 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2363 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2364 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2365 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2366 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2367 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2368 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2369 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2370 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2371 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2372 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2373 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2374 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /* BOUND proper: check the index register against the two bounds
               fetched from memory (lower at disp+0, upper at disp+2/+4); the
               C implementation raises #BR if out of range. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t,    u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t,    u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t,    u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t,    u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t,    u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t,    u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* Reaching this point means MODRM.MOD == 3, i.e. the EVEX prefix case
           in legacy/compat mode; #UD unless the guest CPU has AVX-512F. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix: consume the remaining two payload bytes, then bail since
       EVEX-encoded instructions are not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2462
2463
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjust the RPL field of the destination selector to be at
 * least that of the source; always 16-bit.  286+, protected mode only
 * (invalid in real and V8086 mode).  The actual RPL adjustment and ZF
 * update are done by iemAImpl_arpl.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory - read-modify-write mapping of the destination selector. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2511
2512
/**
 * @opcode 0x63
 *
 * MOVSXD Gv,Ev (64-bit mode) - sign-extend a 32-bit source into a 64-bit
 * destination register.  Only the REX.W form is implemented; the non-REX.W
 * form hits the AssertFailedReturn below.
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED); /* non-REX.W form not implemented, see @note above. */
}
2560
2561
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * FS segment override prefix: records the prefix flag, sets FS as the
 * effective segment, then decodes and dispatches the next opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2579
2580
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * GS segment override prefix: records the prefix flag, sets GS as the
 * effective segment, then decodes and dispatches the next opcode byte.
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg   = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2598
2599
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Operand-size override prefix: records the prefix flag, recalculates the
 * effective operand size, then decodes and dispatches the next opcode byte.
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present (they've already claimed the slot). */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2624
2625
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Address-size override prefix: records the prefix flag, toggles the
 * effective address mode away from the default (16<->32, 64->32), then
 * decodes and dispatches the next opcode byte.
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2651
2652
/**
 * @opcode 0x68
 *
 * PUSH Iz - push an immediate of operand size (16/32 bits; sign-extended to
 * 64 bits in long mode, where the default operand size is 64-bit).
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit pushes still only encode a 32-bit immediate, sign-extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2699
2700
/**
 * @opcode 0x69
 *
 * IMUL Gv,Ev,Iz - three-operand signed multiply: Gv = Ev * Iz.  The source
 * (register or memory) is multiplied by a full-size immediate and the
 * truncated result is written to the destination register; CF/OF are set by
 * the assembly helper, SF/ZF/AF/PF are undefined.  One reg/mem x three
 * operand-size cases below, all following the same shape: fetch source into
 * a local, multiply via the EFLAGS-behavior-selected helper, store back.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate bytes following the modrm. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes following the modrm. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                /* The 64-bit form still encodes only a 32-bit immediate, sign-extended. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate bytes following the modrm. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2869
2870
/**
 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate; the int8_t local is
 * implicitly sign-extended to the effective operand size by the push macro
 * argument conversion.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(i8Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2905
2906
/**
 * @opcode 0x6b
 *
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate: Gv = Ev * (sign-extended Ib).  Same structure as the 0x69 Iz
 * form; CF/OF are set by the assembly helper, SF/ZF/AF/PF are undefined.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
                IEM_MC_ARG(uint16_t,        u16Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint16_t,      u16Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte following the modrm. */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;

        case IEMMODE_32BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
                IEM_MC_ARG(uint32_t,        u32Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint32_t,      u32Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte following the modrm. */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;

        case IEMMODE_64BIT:
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
                IEM_MC_ARG(uint64_t,        u64Src,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
                IEM_MC_LOCAL(uint64_t,      u64Tmp);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte following the modrm. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3069
3070
/**
 * @opcode 0x6c
 *
 * INS/INSB Yb,DX - input byte(s) from port DX to ES:[e/rDI].  Both the
 * plain and REP forms are deferred to C implementations selected by the
 * effective address mode; the 'false' argument is passed through to the
 * C implementation (see iemCImpl_ins_op8_addrNN).
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        /* REPNZ is treated like REPZ here. */
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3101
3102
/**
 * @opcode 0x6d
 * INSW/INSD - input word/dword from port DX into ES:[e/rDI].  Requires 186+.
 * Dispatches on operand size (64-bit shares the 32-bit C impl, i.e. there is
 * no 64-bit operand form of INS) and then on address size.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size behaves as 32-bit */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size behaves as 32-bit */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3165
3166
/**
 * @opcode 0x6e
 * OUTSB - output byte from [seg:e/rSI] to port DX.  Requires a 186+ CPU.
 * Unlike INS, the source segment is overridable, so the effective segment
 * (iEffSeg) is forwarded to the C implementation.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            /* NOTE(review): trailing 'false' presumably fIoChecked - confirm in C impl. */
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3197
3198
/**
 * @opcode 0x6f
 * OUTSW/OUTSD - output word/dword from [seg:e/rSI] to port DX.  Requires 186+.
 * 64-bit operand size shares the 32-bit C impl (no 64-bit OUTS form); the
 * overridable source segment (iEffSeg) is forwarded to the C implementation.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size behaves as 32-bit */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru: 64-bit operand size behaves as 32-bit */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3261
3262
/**
 * @opcode 0x70
 * JO Jb - jump short if the overflow flag is set (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3281
3282
/**
 * @opcode 0x71
 * JNO Jb - jump short if the overflow flag is clear (OF=0).
 * The MC block tests OF set, so the jump is taken in the else branch.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3301
/**
 * @opcode 0x72
 * JC/JB/JNAE Jb - jump short if the carry flag is set (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3320
3321
/**
 * @opcode 0x73
 * JNC/JNB/JAE Jb - jump short if the carry flag is clear (CF=0).
 * Inverted test: the jump is taken in the else branch.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3340
3341
/**
 * @opcode 0x74
 * JE/JZ Jb - jump short if the zero flag is set (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3360
3361
/**
 * @opcode 0x75
 * JNE/JNZ Jb - jump short if the zero flag is clear (ZF=0).
 * Inverted test: the jump is taken in the else branch.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3380
3381
/**
 * @opcode 0x76
 * JBE/JNA Jb - jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3400
3401
/**
 * @opcode 0x77
 * JA/JNBE Jb - jump short if above (CF=0 and ZF=0).
 * Inverted test: the jump is taken in the else branch.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3420
3421
/**
 * @opcode 0x78
 * JS Jb - jump short if the sign flag is set (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3440
3441
/**
 * @opcode 0x79
 * JNS Jb - jump short if the sign flag is clear (SF=0).
 * Inverted test: the jump is taken in the else branch.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3460
3461
/**
 * @opcode 0x7a
 * JP/JPE Jb - jump short if the parity flag is set (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3480
3481
/**
 * @opcode 0x7b
 * JNP/JPO Jb - jump short if the parity flag is clear (PF=0).
 * Inverted test: the jump is taken in the else branch.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3500
3501
/**
 * @opcode 0x7c
 * JL/JNGE Jb - jump short if less, signed (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3520
3521
/**
 * @opcode 0x7d
 * JGE/JNL Jb - jump short if greater or equal, signed (SF == OF).
 * Inverted test: the jump is taken in the else branch.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3540
3541
/**
 * @opcode 0x7e
 * JLE/JNG Jb - jump short if less or equal, signed (ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3560
3561
/**
 * @opcode 0x7f
 * JG/JNLE Jb - jump short if greater, signed (ZF=0 and SF == OF).
 * Inverted test: the jump is taken in the else branch.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm); /* signed 8-bit displacement */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();        /* not taken */
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);    /* taken */
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3580
3581
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register-target path and the unlocked memory-target path, then
 * opens (but deliberately does NOT close) an else-block for the LOCK-prefixed
 * memory case.  It must therefore always be paired with exactly one of
 * IEMOP_BODY_BINARY_Eb_Ib_LOCKED() or IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK(),
 * which supply that path and close the dangling braces.
 *
 * @param a_fnNormalU8  Assembly worker for the non-atomic u8 operation.
 * @param a_fRW         IEM_ACCESS_DATA_RW or IEM_ACCESS_DATA_R (cmp only reads).
 */
#define IEMOP_BODY_BINARY_Eb_Ib(a_fnNormalU8, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_BEGIN(3, 0); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,   pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 byte of immediate follows the ModR/M bytes */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, a_fRW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3631
/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib for operations where the LOCK prefix is
 * invalid (cmp): raises \#UD and closes the two braces left open above.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0
3638
/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib with the LOCK-prefixed memory path: maps the
 * destination read/write and invokes the atomic (locked) worker, then closes
 * the two braces left open above.
 *
 * @param a_fnLockedU8  Assembly worker for the atomic u8 operation.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 2); \
            IEM_MC_ARG(uint8_t *,   pu8Dst,          0); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 byte of immediate follows the ModR/M bytes */ \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3661
3662
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * ADD Eb,Ib.  The two body macros together form one statement: the first
 * emits the unlocked paths, the second the LOCK-prefixed memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
3673
3674
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * OR Eb,Ib - unlocked paths plus LOCK-prefixed memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_or_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
3685
3686
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * ADC Eb,Ib (add with carry) - unlocked paths plus LOCK-prefixed memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_adc_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
3697
3698
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * SBB Eb,Ib (subtract with borrow) - unlocked paths plus LOCK-prefixed memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_sbb_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
3709
3710
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * AND Eb,Ib - unlocked paths plus LOCK-prefixed memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_and_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
3721
3722
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * SUB Eb,Ib - unlocked paths plus LOCK-prefixed memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_sub_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
3733
3734
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * XOR Eb,Ib - unlocked paths plus LOCK-prefixed memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(       iemAImpl_xor_u8, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
3745
3746
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * CMP Eb,Ib - only reads the destination (IEM_ACCESS_DATA_R), hence the
 * NO_LOCK closer: a LOCK prefix on cmp raises \#UD.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
3757
3758
/**
 * @opcode 0x80
 * Group 1: Eb,Ib byte operations; the ModR/M reg field (/0../7) selects the
 * operation.  The ModR/M byte is fetched here and passed to the worker.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3778
3779
/**
 * Body for a group 1 binary operator, Ev,Iz form (opcode 0x81).
 *
 * Emits the register-target paths (16/32/64-bit) and the unlocked
 * memory-target paths, then opens (but does NOT close) an else-block for the
 * LOCK-prefixed memory case.  Must always be paired with exactly one of
 * IEMOP_BODY_BINARY_Ev_Iz_LOCKED() or IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK(),
 * which supply that path and close the dangling braces.
 *
 * The 64-bit immediate is the sign-extended 32-bit imm (no imm64 encoding).
 *
 * @param a_fnNormalU16, a_fnNormalU32, a_fnNormalU64
 *                      Non-atomic assembly workers per operand size.
 * @param a_fRW         IEM_ACCESS_DATA_RW or IEM_ACCESS_DATA_R (cmp only reads).
 */
#define IEMOP_BODY_BINARY_Ev_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,              0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,              2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,              0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,              2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                if (a_fRW == IEM_ACCESS_DATA_RW) /* 32-bit writes zero bits 63:32; cmp doesn't write */ \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* NOTE(review): 16/32-bit cases use IEMOP_HLP_DONE_DECODING() here - confirm the asymmetry is intentional */ \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,              0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,              2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,         0); \
                    IEM_MC_ARG(uint16_t,    u16Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow */ \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,         0); \
                    IEM_MC_ARG(uint32_t,    u32Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */ \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,         0); \
                    IEM_MC_ARG(uint64_t,    u64Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still 4 immediate bytes (sign-extended) */ \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
3930
/**
 * Closes IEMOP_BODY_BINARY_Ev_Iz for operations where the LOCK prefix is
 * invalid (cmp): raises \#UD and closes the two braces left open above.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0
3937
/**
 * Closes IEMOP_BODY_BINARY_Ev_Iz with the LOCK-prefixed memory paths
 * (16/32/64-bit), using the atomic workers, and closes the braces left
 * open above.
 *
 * @param a_fnLockedU16, a_fnLockedU32, a_fnLockedU64  Atomic assembly workers.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,  pu16Dst,         0); \
                    IEM_MC_ARG(uint16_t,    u16Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 immediate bytes follow */ \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEM_MC_ASSIGN(u16Src, u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,  pu32Dst,         0); \
                    IEM_MC_ARG(uint32_t,    u32Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 immediate bytes follow */ \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEM_MC_ASSIGN(u32Src, u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,  pu64Dst,         0); \
                    IEM_MC_ARG(uint64_t,    u64Src,          1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still 4 immediate bytes (sign-extended) */ \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_ASSIGN(u64Src, u64Imm); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4015
4016
/**
 * @opmaps grp1_81
 * @opcode /0
 * ADD Ev,Iz.  The two body macros together form one statement: the first
 * emits the unlocked paths, the second the LOCK-prefixed memory paths.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4027
4028
/**
 * @opmaps grp1_81
 * @opcode /1
 * OR Ev,Iz - unlocked paths plus LOCK-prefixed memory paths.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4039
4040
/**
 * @opmaps grp1_81
 * @opcode /2
 * ADC Ev,Iz (add with carry) - unlocked paths plus LOCK-prefixed memory paths.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4051
4052
/**
 * @opmaps grp1_81
 * @opcode /3
 * SBB Ev,Iz (subtract with borrow) - unlocked paths plus LOCK-prefixed memory paths.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4063
4064
/**
 * @opmaps grp1_81
 * @opcode /4
 * AND Ev,Iz - unlocked paths plus LOCK-prefixed memory paths.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4075
4076
/**
 * @opmaps grp1_81
 * @opcode /5
 * SUB Ev,Iz - unlocked paths plus LOCK-prefixed memory paths.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4087
4088
/**
 * @opmaps grp1_81
 * @opcode /6
 * XOR Ev,Iz - unlocked paths plus LOCK-prefixed memory paths.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(       iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4099
4100
/**
 * @opmaps grp1_81
 * @opcode /7
 * CMP Ev,Iz - only reads the destination (IEM_ACCESS_DATA_R), hence the
 * NO_LOCK closer: a LOCK prefix on cmp raises \#UD.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Iz_NO_LOCK();
}
4111
4112
/**
 * @opcode 0x81
 * Group 1 dispatcher: the reg field of the ModR/M byte selects which of the
 * eight binary ALU operations (add/or/adc/sbb/and/sub/xor/cmp) is applied
 * to Ev with a z-sized immediate.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4132
4133
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 * Alias of opcode 0x80 (group 1 Eb,Ib) that is only valid outside 64-bit
 * mode; in 64-bit mode it is undefined, hence the IEMOP_HLP_NO_64BIT guard.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4144
4145
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.
 *
 * The imm8 is sign-extended to the effective operand size before being handed
 * to the a_fnNormalUxx worker (note the (int8_t) casts below).  Register
 * targets are handled inline; un-LOCKed memory targets are mapped with
 * a_fRW access, operated on and committed.  The macro deliberately ends
 * inside the opening of the "memory target with LOCK prefix" else branch,
 * which MUST be closed by a following IEMOP_BODY_BINARY_Ev_Ib_LOCKED or
 * IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK invocation.
 */
#define IEMOP_BODY_BINARY_Ev_Ib(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                 0); \
                /* imm8 sign-extended to 16 bits: */ \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                 0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                /* Writing a 32-bit GPR clears bits 63:32; skip for read-only ops (cmp). */ \
                if ((a_fRW) != IEM_ACCESS_DATA_R) \
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                 0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                 0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    /* The trailing 1 = one immediate byte still follows the ModR/M bytes. */ \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                 0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                 0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, a_fRW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, a_fRW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
4297
/**
 * Closes the scopes left open by IEMOP_BODY_BINARY_Ev_Ib for instructions
 * that do not accept a LOCK prefix (cmp): the LOCK-prefixed memory path
 * raises the invalid-lock-prefix exception instead of doing any work.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            return IEMOP_RAISE_INVALID_LOCK_PREFIX(); \
        } \
    } \
    (void)0
4304
/**
 * Closes the scopes left open by IEMOP_BODY_BINARY_Ev_Ib, implementing the
 * LOCK-prefixed memory path: the operand is mapped read-write and the
 * a_fnLockedUxx (atomic) worker of the effective operand size is invoked.
 * The imm8 is sign-extended just like in the unlocked path.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,                 0); \
                    IEM_MC_ARG(uint16_t,        u16Src,                  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,                 0); \
                    IEM_MC_ARG(uint32_t,        u32Src,                  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 2); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,                 0); \
                    IEM_MC_ARG(uint64_t,        u64Src,                  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,         2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4382
/**
 * @opmaps grp1_83
 * @opcode /0
 * @note   ADD Ev,Ib - imm8 sign-extended to the operand size; plain body plus
 *         the LOCKed memory closer form one complete function body.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4393
4394
/**
 * @opmaps grp1_83
 * @opcode /1
 * @note   OR Ev,Ib - imm8 sign-extended to the operand size.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4405
4406
/**
 * @opmaps grp1_83
 * @opcode /2
 * @note   ADC Ev,Ib - imm8 sign-extended to the operand size.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4417
4418
/**
 * @opmaps grp1_83
 * @opcode /3
 * @note   SBB Ev,Ib - imm8 sign-extended to the operand size.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4429
4430
/**
 * @opmaps grp1_83
 * @opcode /4
 * @note   AND Ev,Ib - imm8 sign-extended to the operand size.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4441
4442
/**
 * @opmaps grp1_83
 * @opcode /5
 * @note   SUB Ev,Ib - imm8 sign-extended to the operand size.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4453
4454
/**
 * @opmaps grp1_83
 * @opcode /6
 * @note   XOR Ev,Ib - imm8 sign-extended to the operand size.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(       iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64, IEM_ACCESS_DATA_RW);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4465
4466
/**
 * @opmaps grp1_83
 * @opcode /7
 * @note   CMP Ev,Ib only reads its destination (IEM_ACCESS_DATA_R) and
 *         writes nothing but EFLAGS, so a LOCK prefix is invalid (NO_LOCK).
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_Ev_Ib_NO_LOCK();
}
4477
4478
/**
 * @opcode 0x83
 * Group 1 dispatcher for the sign-extended imm8 forms: the reg field of the
 * ModR/M byte selects the ALU operation applied to Ev,Ib.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4501
4502
/**
 * @opcode 0x84
 * TEST Eb,Gb - AND without writing the destination: the operand is accessed
 * read-only (IEM_ACCESS_DATA_R) and a LOCK prefix is rejected.  AF is left
 * undefined, as declared to the verifier below.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8(iemAImpl_test_u8, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
4513
4514
/**
 * @opcode 0x85
 * TEST Ev,Gv - like 0x84 but for word/dword/qword operands: read-only
 * destination access, LOCK prefix rejected, AF undefined.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, IEM_ACCESS_DATA_R);
    IEMOP_BODY_BINARY_rm_rv_NO_LOCK();
}
4525
4526
/**
 * @opcode 0x86
 * XCHG Eb,Gb - byte exchange.  For the memory form the locked worker is used
 * by default and the unlocked one only when the VM is configured to disregard
 * LOCK semantics (fDisregardLock), matching XCHG's implicitly-locked memory
 * access.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Plain two-temporary swap of the two byte registers. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem,           0);
        IEM_MC_ARG(uint8_t *, pu8Reg,           1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* XCHG to memory is implicitly locked unless the VM disregards LOCK. */
        if (!pVCpu->iem.s.fDisregardLock)
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4578
4579
/**
 * @opcode 0x87
 * XCHG Ev,Gv - word/dword/qword exchange.  Register forms swap via two
 * temporaries; memory forms map the operand read-write and use the locked
 * worker unless the VM is configured to disregard LOCK (fDisregardLock).
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!pVCpu->iem.s.fDisregardLock)
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!pVCpu->iem.s.fDisregardLock)
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* Writing a 32-bit GPR via reference: clear bits 63:32 explicitly. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!pVCpu->iem.s.fDisregardLock)
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4712
4713
/**
 * @opcode 0x88
 * MOV Eb,Gb - store a byte register into r/m8 (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4753
4754
/**
 * @opcode 0x89
 * MOV Ev,Gv - store a general register into r/m16/32/64, one MC block per
 * effective operand size for both the register and the memory destination.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4849
4850
/**
 * @opcode 0x8a
 * MOV Gb,Eb - load a byte register from r/m8 (register or memory).
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
4889
4890
/**
 * @opcode 0x8b
 * MOV Gv,Ev - load a general register from r/m16/32/64, one MC block per
 * effective operand size for both the register and the memory source.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
4985
4986
/**
 * opcode 0x63
 * @todo Table fixme
 *
 * Mode-dependent dispatcher: outside 64-bit mode opcode 0x63 is ARPL Ew,Gw;
 * in 64-bit mode it is MOVSXD, which degenerates to a plain MOV Gv,Ev when
 * the effective operand size is not 64-bit.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
4999
5000
/**
 * @opcode 0x8c
 * MOV Ev,Sw - store a segment register.  Register destinations honour the
 * operand size (zero-extending for 32/64-bit); memory destinations are
 * always written as a 16-bit word.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5077
5078
5079
5080
5081/**
5082 * @opcode 0x8d
5083 */
5084FNIEMOP_DEF(iemOp_lea_Gv_M)
5085{
5086 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5088 if (IEM_IS_MODRM_REG_MODE(bRm))
5089 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
5090
5091 switch (pVCpu->iem.s.enmEffOpSize)
5092 {
5093 case IEMMODE_16BIT:
5094 IEM_MC_BEGIN(0, 2);
5095 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5096 IEM_MC_LOCAL(uint16_t, u16Cast);
5097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5099 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5100 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5101 IEM_MC_ADVANCE_RIP_AND_FINISH();
5102 IEM_MC_END();
5103 break;
5104
5105 case IEMMODE_32BIT:
5106 IEM_MC_BEGIN(0, 2);
5107 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5108 IEM_MC_LOCAL(uint32_t, u32Cast);
5109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5111 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5112 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5113 IEM_MC_ADVANCE_RIP_AND_FINISH();
5114 IEM_MC_END();
5115 break;
5116
5117 case IEMMODE_64BIT:
5118 IEM_MC_BEGIN(0, 1);
5119 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5120 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5121 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5122 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5123 IEM_MC_ADVANCE_RIP_AND_FINISH();
5124 IEM_MC_END();
5125 break;
5126
5127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5128 }
5129}
5130
5131
/**
 * @opcode 0x8e
 * MOV Sw,Ev - load a segment register from r/m16.  CS cannot be loaded this
 * way and out-of-range seg indices are invalid (\#UD).  The actual load goes
 * through the iemCImpl_load_SReg C implementation since loading a segment
 * register involves descriptor handling beyond a simple register write.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
}
5187
5188
5189/** Opcode 0x8f /0. */
5190FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
5191{
5192 /* This bugger is rather annoying as it requires rSP to be updated before
5193 doing the effective address calculations. Will eventually require a
5194 split between the R/M+SIB decoding and the effective address
5195 calculation - which is something that is required for any attempt at
5196 reusing this code for a recompiler. It may also be good to have if we
5197 need to delay #UD exception caused by invalid lock prefixes.
5198
5199 For now, we'll do a mostly safe interpreter-only implementation here. */
5200 /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
5201 * now until tests show it's checked.. */
5202 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
5203
5204 /* Register access is relatively easy and can share code. */
5205 if (IEM_IS_MODRM_REG_MODE(bRm))
5206 return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
5207
5208 /*
5209 * Memory target.
5210 *
5211 * Intel says that RSP is incremented before it's used in any effective
5212 * address calcuations. This means some serious extra annoyance here since
5213 * we decode and calculate the effective address in one step and like to
5214 * delay committing registers till everything is done.
5215 *
5216 * So, we'll decode and calculate the effective address twice. This will
5217 * require some recoding if turned into a recompiler.
5218 */
5219 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
5220
5221#ifndef TST_IEM_CHECK_MC
5222 /* Calc effective address with modified ESP. */
5223/** @todo testcase */
5224 RTGCPTR GCPtrEff;
5225 VBOXSTRICTRC rcStrict;
5226 switch (pVCpu->iem.s.enmEffOpSize)
5227 {
5228 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
5229 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
5230 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
5231 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5232 }
5233 if (rcStrict != VINF_SUCCESS)
5234 return rcStrict;
5235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5236
5237 /* Perform the operation - this should be CImpl. */
5238 RTUINT64U TmpRsp;
5239 TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
5240 switch (pVCpu->iem.s.enmEffOpSize)
5241 {
5242 case IEMMODE_16BIT:
5243 {
5244 uint16_t u16Value;
5245 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
5246 if (rcStrict == VINF_SUCCESS)
5247 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
5248 break;
5249 }
5250
5251 case IEMMODE_32BIT:
5252 {
5253 uint32_t u32Value;
5254 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
5255 if (rcStrict == VINF_SUCCESS)
5256 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
5257 break;
5258 }
5259
5260 case IEMMODE_64BIT:
5261 {
5262 uint64_t u64Value;
5263 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
5264 if (rcStrict == VINF_SUCCESS)
5265 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
5266 break;
5267 }
5268
5269 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5270 }
5271 if (rcStrict == VINF_SUCCESS)
5272 {
5273 pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
5274 return iemRegUpdateRipAndFinishClearingRF(pVCpu);
5275 }
5276 return rcStrict;
5277
5278#else
5279 return VERR_IEM_IPE_2;
5280#endif
5281}
5282
5283
5284/**
5285 * @opcode 0x8f
5286 */
5287FNIEMOP_DEF(iemOp_Grp1A__xop)
5288{
5289 /*
5290 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
5291 * three byte VEX prefix, except that the mmmmm field cannot have the values
5292 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
5293 */
5294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5295 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
5296 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
5297
5298 IEMOP_MNEMONIC(xop, "xop");
5299 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
5300 {
5301 /** @todo Test when exctly the XOP conformance checks kick in during
5302 * instruction decoding and fetching (using \#PF). */
5303 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
5304 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5305 if ( ( pVCpu->iem.s.fPrefixes
5306 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5307 == 0)
5308 {
5309 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
5310 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
5311 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5312 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
5313 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
5314 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
5315 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
5316 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
5317 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
5318
5319 /** @todo XOP: Just use new tables and decoders. */
5320 switch (bRm & 0x1f)
5321 {
5322 case 8: /* xop opcode map 8. */
5323 IEMOP_BITCH_ABOUT_STUB();
5324 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5325
5326 case 9: /* xop opcode map 9. */
5327 IEMOP_BITCH_ABOUT_STUB();
5328 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5329
5330 case 10: /* xop opcode map 10. */
5331 IEMOP_BITCH_ABOUT_STUB();
5332 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5333
5334 default:
5335 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5336 return IEMOP_RAISE_INVALID_OPCODE();
5337 }
5338 }
5339 else
5340 Log(("XOP: Invalid prefix mix!\n"));
5341 }
5342 else
5343 Log(("XOP: XOP support disabled!\n"));
5344 return IEMOP_RAISE_INVALID_OPCODE();
5345}
5346
5347
5348/**
5349 * Common 'xchg reg,rAX' helper.
5350 */
5351FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
5352{
5353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5354
5355 iReg |= pVCpu->iem.s.uRexB;
5356 switch (pVCpu->iem.s.enmEffOpSize)
5357 {
5358 case IEMMODE_16BIT:
5359 IEM_MC_BEGIN(0, 2);
5360 IEM_MC_LOCAL(uint16_t, u16Tmp1);
5361 IEM_MC_LOCAL(uint16_t, u16Tmp2);
5362 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
5363 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
5364 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
5365 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
5366 IEM_MC_ADVANCE_RIP_AND_FINISH();
5367 IEM_MC_END();
5368 break;
5369
5370 case IEMMODE_32BIT:
5371 IEM_MC_BEGIN(0, 2);
5372 IEM_MC_LOCAL(uint32_t, u32Tmp1);
5373 IEM_MC_LOCAL(uint32_t, u32Tmp2);
5374 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
5375 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
5376 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
5377 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
5378 IEM_MC_ADVANCE_RIP_AND_FINISH();
5379 IEM_MC_END();
5380 break;
5381
5382 case IEMMODE_64BIT:
5383 IEM_MC_BEGIN(0, 2);
5384 IEM_MC_LOCAL(uint64_t, u64Tmp1);
5385 IEM_MC_LOCAL(uint64_t, u64Tmp2);
5386 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
5387 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
5388 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
5389 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
5390 IEM_MC_ADVANCE_RIP_AND_FINISH();
5391 IEM_MC_END();
5392 break;
5393
5394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5395 }
5396}
5397
5398
5399/**
5400 * @opcode 0x90
5401 */
5402FNIEMOP_DEF(iemOp_nop)
5403{
5404 /* R8/R8D and RAX/EAX can be exchanged. */
5405 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
5406 {
5407 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
5408 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
5409 }
5410
5411 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
5412 {
5413 IEMOP_MNEMONIC(pause, "pause");
5414#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
5415 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
5416 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
5417#endif
5418#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
5419 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
5420 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
5421#endif
5422 }
5423 else
5424 IEMOP_MNEMONIC(nop, "nop");
5425 IEM_MC_BEGIN(0, 0);
5426 IEM_MC_ADVANCE_RIP_AND_FINISH();
5427 IEM_MC_END();
5428}
5429
5430
5431/**
5432 * @opcode 0x91
5433 */
5434FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
5435{
5436 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
5437 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
5438}
5439
5440
5441/**
5442 * @opcode 0x92
5443 */
5444FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
5445{
5446 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
5447 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
5448}
5449
5450
5451/**
5452 * @opcode 0x93
5453 */
5454FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
5455{
5456 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
5457 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
5458}
5459
5460
5461/**
5462 * @opcode 0x94
5463 */
5464FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
5465{
5466 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
5467 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
5468}
5469
5470
5471/**
5472 * @opcode 0x95
5473 */
5474FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
5475{
5476 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
5477 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
5478}
5479
5480
5481/**
5482 * @opcode 0x96
5483 */
5484FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
5485{
5486 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
5487 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
5488}
5489
5490
5491/**
5492 * @opcode 0x97
5493 */
5494FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
5495{
5496 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
5497 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
5498}
5499
5500
5501/**
5502 * @opcode 0x98
5503 */
5504FNIEMOP_DEF(iemOp_cbw)
5505{
5506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5507 switch (pVCpu->iem.s.enmEffOpSize)
5508 {
5509 case IEMMODE_16BIT:
5510 IEMOP_MNEMONIC(cbw, "cbw");
5511 IEM_MC_BEGIN(0, 1);
5512 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
5513 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
5514 } IEM_MC_ELSE() {
5515 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
5516 } IEM_MC_ENDIF();
5517 IEM_MC_ADVANCE_RIP_AND_FINISH();
5518 IEM_MC_END();
5519 break;
5520
5521 case IEMMODE_32BIT:
5522 IEMOP_MNEMONIC(cwde, "cwde");
5523 IEM_MC_BEGIN(0, 1);
5524 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5525 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
5526 } IEM_MC_ELSE() {
5527 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
5528 } IEM_MC_ENDIF();
5529 IEM_MC_ADVANCE_RIP_AND_FINISH();
5530 IEM_MC_END();
5531 break;
5532
5533 case IEMMODE_64BIT:
5534 IEMOP_MNEMONIC(cdqe, "cdqe");
5535 IEM_MC_BEGIN(0, 1);
5536 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5537 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
5538 } IEM_MC_ELSE() {
5539 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
5540 } IEM_MC_ENDIF();
5541 IEM_MC_ADVANCE_RIP_AND_FINISH();
5542 IEM_MC_END();
5543 break;
5544
5545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5546 }
5547}
5548
5549
5550/**
5551 * @opcode 0x99
5552 */
5553FNIEMOP_DEF(iemOp_cwd)
5554{
5555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5556 switch (pVCpu->iem.s.enmEffOpSize)
5557 {
5558 case IEMMODE_16BIT:
5559 IEMOP_MNEMONIC(cwd, "cwd");
5560 IEM_MC_BEGIN(0, 1);
5561 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
5562 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
5563 } IEM_MC_ELSE() {
5564 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
5565 } IEM_MC_ENDIF();
5566 IEM_MC_ADVANCE_RIP_AND_FINISH();
5567 IEM_MC_END();
5568 break;
5569
5570 case IEMMODE_32BIT:
5571 IEMOP_MNEMONIC(cdq, "cdq");
5572 IEM_MC_BEGIN(0, 1);
5573 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
5574 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
5575 } IEM_MC_ELSE() {
5576 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
5577 } IEM_MC_ENDIF();
5578 IEM_MC_ADVANCE_RIP_AND_FINISH();
5579 IEM_MC_END();
5580 break;
5581
5582 case IEMMODE_64BIT:
5583 IEMOP_MNEMONIC(cqo, "cqo");
5584 IEM_MC_BEGIN(0, 1);
5585 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
5586 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
5587 } IEM_MC_ELSE() {
5588 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
5589 } IEM_MC_ENDIF();
5590 IEM_MC_ADVANCE_RIP_AND_FINISH();
5591 IEM_MC_END();
5592 break;
5593
5594 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5595 }
5596}
5597
5598
5599/**
5600 * @opcode 0x9a
5601 */
5602FNIEMOP_DEF(iemOp_call_Ap)
5603{
5604 IEMOP_MNEMONIC(call_Ap, "call Ap");
5605 IEMOP_HLP_NO_64BIT();
5606
5607 /* Decode the far pointer address and pass it on to the far call C implementation. */
5608 uint32_t offSeg;
5609 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
5610 IEM_OPCODE_GET_NEXT_U32(&offSeg);
5611 else
5612 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
5613 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
5614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5615 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
5616}
5617
5618
5619/** Opcode 0x9b. (aka fwait) */
5620FNIEMOP_DEF(iemOp_wait)
5621{
5622 IEMOP_MNEMONIC(wait, "wait");
5623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5624
5625 IEM_MC_BEGIN(0, 0);
5626 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
5627 IEM_MC_MAYBE_RAISE_FPU_XCPT();
5628 IEM_MC_ADVANCE_RIP_AND_FINISH();
5629 IEM_MC_END();
5630}
5631
5632
5633/**
5634 * @opcode 0x9c
5635 */
5636FNIEMOP_DEF(iemOp_pushf_Fv)
5637{
5638 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5641 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
5642}
5643
5644
5645/**
5646 * @opcode 0x9d
5647 */
5648FNIEMOP_DEF(iemOp_popf_Fv)
5649{
5650 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
5651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5652 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5653 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
5654}
5655
5656
5657/**
5658 * @opcode 0x9e
5659 */
5660FNIEMOP_DEF(iemOp_sahf)
5661{
5662 IEMOP_MNEMONIC(sahf, "sahf");
5663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5664 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5665 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5666 return IEMOP_RAISE_INVALID_OPCODE();
5667 IEM_MC_BEGIN(0, 2);
5668 IEM_MC_LOCAL(uint32_t, u32Flags);
5669 IEM_MC_LOCAL(uint32_t, EFlags);
5670 IEM_MC_FETCH_EFLAGS(EFlags);
5671 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
5672 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
5673 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
5674 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
5675 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
5676 IEM_MC_COMMIT_EFLAGS(EFlags);
5677 IEM_MC_ADVANCE_RIP_AND_FINISH();
5678 IEM_MC_END();
5679}
5680
5681
5682/**
5683 * @opcode 0x9f
5684 */
5685FNIEMOP_DEF(iemOp_lahf)
5686{
5687 IEMOP_MNEMONIC(lahf, "lahf");
5688 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5689 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5690 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
5691 return IEMOP_RAISE_INVALID_OPCODE();
5692 IEM_MC_BEGIN(0, 1);
5693 IEM_MC_LOCAL(uint8_t, u8Flags);
5694 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
5695 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
5696 IEM_MC_ADVANCE_RIP_AND_FINISH();
5697 IEM_MC_END();
5698}
5699
5700
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
 * prefixes. Will return on failures.
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        /* The width of the immediate offset follows the address size. */ \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
5725
5726/**
5727 * @opcode 0xa0
5728 */
5729FNIEMOP_DEF(iemOp_mov_AL_Ob)
5730{
5731 /*
5732 * Get the offset and fend off lock prefixes.
5733 */
5734 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
5735 RTGCPTR GCPtrMemOff;
5736 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5737
5738 /*
5739 * Fetch AL.
5740 */
5741 IEM_MC_BEGIN(0,1);
5742 IEM_MC_LOCAL(uint8_t, u8Tmp);
5743 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5744 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
5745 IEM_MC_ADVANCE_RIP_AND_FINISH();
5746 IEM_MC_END();
5747}
5748
5749
5750/**
5751 * @opcode 0xa1
5752 */
5753FNIEMOP_DEF(iemOp_mov_rAX_Ov)
5754{
5755 /*
5756 * Get the offset and fend off lock prefixes.
5757 */
5758 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
5759 RTGCPTR GCPtrMemOff;
5760 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5761
5762 /*
5763 * Fetch rAX.
5764 */
5765 switch (pVCpu->iem.s.enmEffOpSize)
5766 {
5767 case IEMMODE_16BIT:
5768 IEM_MC_BEGIN(0,1);
5769 IEM_MC_LOCAL(uint16_t, u16Tmp);
5770 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5771 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
5772 IEM_MC_ADVANCE_RIP_AND_FINISH();
5773 IEM_MC_END();
5774 break;
5775
5776 case IEMMODE_32BIT:
5777 IEM_MC_BEGIN(0,1);
5778 IEM_MC_LOCAL(uint32_t, u32Tmp);
5779 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5780 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
5781 IEM_MC_ADVANCE_RIP_AND_FINISH();
5782 IEM_MC_END();
5783 break;
5784
5785 case IEMMODE_64BIT:
5786 IEM_MC_BEGIN(0,1);
5787 IEM_MC_LOCAL(uint64_t, u64Tmp);
5788 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
5789 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
5790 IEM_MC_ADVANCE_RIP_AND_FINISH();
5791 IEM_MC_END();
5792 break;
5793
5794 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5795 }
5796}
5797
5798
5799/**
5800 * @opcode 0xa2
5801 */
5802FNIEMOP_DEF(iemOp_mov_Ob_AL)
5803{
5804 /*
5805 * Get the offset and fend off lock prefixes.
5806 */
5807 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
5808 RTGCPTR GCPtrMemOff;
5809 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5810
5811 /*
5812 * Store AL.
5813 */
5814 IEM_MC_BEGIN(0,1);
5815 IEM_MC_LOCAL(uint8_t, u8Tmp);
5816 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
5817 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
5818 IEM_MC_ADVANCE_RIP_AND_FINISH();
5819 IEM_MC_END();
5820}
5821
5822
5823/**
5824 * @opcode 0xa3
5825 */
5826FNIEMOP_DEF(iemOp_mov_Ov_rAX)
5827{
5828 /*
5829 * Get the offset and fend off lock prefixes.
5830 */
5831 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
5832 RTGCPTR GCPtrMemOff;
5833 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
5834
5835 /*
5836 * Store rAX.
5837 */
5838 switch (pVCpu->iem.s.enmEffOpSize)
5839 {
5840 case IEMMODE_16BIT:
5841 IEM_MC_BEGIN(0,1);
5842 IEM_MC_LOCAL(uint16_t, u16Tmp);
5843 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
5844 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
5845 IEM_MC_ADVANCE_RIP_AND_FINISH();
5846 IEM_MC_END();
5847 break;
5848
5849 case IEMMODE_32BIT:
5850 IEM_MC_BEGIN(0,1);
5851 IEM_MC_LOCAL(uint32_t, u32Tmp);
5852 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
5853 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
5854 IEM_MC_ADVANCE_RIP_AND_FINISH();
5855 IEM_MC_END();
5856 break;
5857
5858 case IEMMODE_64BIT:
5859 IEM_MC_BEGIN(0,1);
5860 IEM_MC_LOCAL(uint64_t, u64Tmp);
5861 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
5862 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
5863 IEM_MC_ADVANCE_RIP_AND_FINISH();
5864 IEM_MC_END();
5865 break;
5866
5867 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5868 }
5869}
5870
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Loads from DS(-overridable):rSI, stores to ES:rDI, then advances or
 * retreats both index registers by the element size according to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
5889
5890/**
5891 * @opcode 0xa4
5892 */
5893FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
5894{
5895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5896
5897 /*
5898 * Use the C implementation if a repeat prefix is encountered.
5899 */
5900 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5901 {
5902 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
5903 switch (pVCpu->iem.s.enmEffAddrMode)
5904 {
5905 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
5906 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
5907 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
5908 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5909 }
5910 }
5911 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
5912
5913 /*
5914 * Sharing case implementation with movs[wdq] below.
5915 */
5916 switch (pVCpu->iem.s.enmEffAddrMode)
5917 {
5918 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
5919 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
5920 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
5921 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5922 }
5923}
5924
5925
5926/**
5927 * @opcode 0xa5
5928 */
5929FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5930{
5931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5932
5933 /*
5934 * Use the C implementation if a repeat prefix is encountered.
5935 */
5936 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5937 {
5938 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5939 switch (pVCpu->iem.s.enmEffOpSize)
5940 {
5941 case IEMMODE_16BIT:
5942 switch (pVCpu->iem.s.enmEffAddrMode)
5943 {
5944 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5945 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5946 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5947 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5948 }
5949 break;
5950 case IEMMODE_32BIT:
5951 switch (pVCpu->iem.s.enmEffAddrMode)
5952 {
5953 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5954 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5955 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5956 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5957 }
5958 case IEMMODE_64BIT:
5959 switch (pVCpu->iem.s.enmEffAddrMode)
5960 {
5961 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5962 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5963 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5964 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5965 }
5966 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5967 }
5968 }
5969 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5970
5971 /*
5972 * Annoying double switch here.
5973 * Using ugly macro for implementing the cases, sharing it with movsb.
5974 */
5975 switch (pVCpu->iem.s.enmEffOpSize)
5976 {
5977 case IEMMODE_16BIT:
5978 switch (pVCpu->iem.s.enmEffAddrMode)
5979 {
5980 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5981 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5982 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5984 }
5985 break;
5986
5987 case IEMMODE_32BIT:
5988 switch (pVCpu->iem.s.enmEffAddrMode)
5989 {
5990 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5991 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5992 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5993 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5994 }
5995 break;
5996
5997 case IEMMODE_64BIT:
5998 switch (pVCpu->iem.s.enmEffAddrMode)
5999 {
6000 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6001 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
6002 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
6003 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6004 }
6005 break;
6006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6007 }
6008}
6009
6010#undef IEM_MOVS_CASE
6011
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Compares the element at DS(-overridable):rSI with the one at ES:rDI via
 * the cmp assembly worker (which sets EFLAGS), then advances or retreats
 * both index registers by the element size according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
    IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR,           uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
6038
6039/**
6040 * @opcode 0xa6
6041 */
6042FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6043{
6044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6045
6046 /*
6047 * Use the C implementation if a repeat prefix is encountered.
6048 */
6049 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6050 {
6051 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6052 switch (pVCpu->iem.s.enmEffAddrMode)
6053 {
6054 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6055 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6056 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6057 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6058 }
6059 }
6060 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6061 {
6062 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6063 switch (pVCpu->iem.s.enmEffAddrMode)
6064 {
6065 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6066 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6067 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6069 }
6070 }
6071 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6072
6073 /*
6074 * Sharing case implementation with cmps[wdq] below.
6075 */
6076 switch (pVCpu->iem.s.enmEffAddrMode)
6077 {
6078 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
6079 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
6080 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
6081 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6082 }
6083}
6084
6085
6086/**
6087 * @opcode 0xa7
6088 */
6089FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
6090{
6091 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6092
6093 /*
6094 * Use the C implementation if a repeat prefix is encountered.
6095 */
6096 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6097 {
6098 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
6099 switch (pVCpu->iem.s.enmEffOpSize)
6100 {
6101 case IEMMODE_16BIT:
6102 switch (pVCpu->iem.s.enmEffAddrMode)
6103 {
6104 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6105 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6106 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6107 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6108 }
6109 break;
6110 case IEMMODE_32BIT:
6111 switch (pVCpu->iem.s.enmEffAddrMode)
6112 {
6113 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6114 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6115 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6116 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6117 }
6118 case IEMMODE_64BIT:
6119 switch (pVCpu->iem.s.enmEffAddrMode)
6120 {
6121 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
6122 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6123 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6125 }
6126 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6127 }
6128 }
6129
6130 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6131 {
6132 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
6133 switch (pVCpu->iem.s.enmEffOpSize)
6134 {
6135 case IEMMODE_16BIT:
6136 switch (pVCpu->iem.s.enmEffAddrMode)
6137 {
6138 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
6139 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
6140 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
6141 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6142 }
6143 break;
6144 case IEMMODE_32BIT:
6145 switch (pVCpu->iem.s.enmEffAddrMode)
6146 {
6147 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
6148 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
6149 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
6150 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6151 }
6152 case IEMMODE_64BIT:
6153 switch (pVCpu->iem.s.enmEffAddrMode)
6154 {
6155 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
6156 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
6157 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
6158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6159 }
6160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6161 }
6162 }
6163
6164 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
6165
6166 /*
6167 * Annoying double switch here.
6168 * Using ugly macro for implementing the cases, sharing it with cmpsb.
6169 */
6170 switch (pVCpu->iem.s.enmEffOpSize)
6171 {
6172 case IEMMODE_16BIT:
6173 switch (pVCpu->iem.s.enmEffAddrMode)
6174 {
6175 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
6176 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
6177 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
6178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6179 }
6180 break;
6181
6182 case IEMMODE_32BIT:
6183 switch (pVCpu->iem.s.enmEffAddrMode)
6184 {
6185 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
6186 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
6187 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
6188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6189 }
6190 break;
6191
6192 case IEMMODE_64BIT:
6193 switch (pVCpu->iem.s.enmEffAddrMode)
6194 {
6195 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6196 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
6197 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
6198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6199 }
6200 break;
6201 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6202 }
6203}
6204
6205#undef IEM_CMPS_CASE
6206
6207/**
6208 * @opcode 0xa8
6209 */
6210FNIEMOP_DEF(iemOp_test_AL_Ib)
6211{
6212 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
6213 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6214 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
6215}
6216
6217
6218/**
6219 * @opcode 0xa9
6220 */
6221FNIEMOP_DEF(iemOp_test_eAX_Iz)
6222{
6223 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
6224 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
6225 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
6226}
6227
6228
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
/* Expands to a complete MC block implementing one STOS flavour: stores the
   low ValBits of xAX at ES:[xDI] (AddrBits-wide address, zero-extended to
   64 bits), then decrements or increments xDI by the operand size in bytes
   according to EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
6244
6245/**
6246 * @opcode 0xaa
6247 */
6248FNIEMOP_DEF(iemOp_stosb_Yb_AL)
6249{
6250 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6251
6252 /*
6253 * Use the C implementation if a repeat prefix is encountered.
6254 */
6255 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6256 {
6257 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
6258 switch (pVCpu->iem.s.enmEffAddrMode)
6259 {
6260 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
6261 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
6262 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
6263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6264 }
6265 }
6266 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
6267
6268 /*
6269 * Sharing case implementation with stos[wdq] below.
6270 */
6271 switch (pVCpu->iem.s.enmEffAddrMode)
6272 {
6273 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
6274 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
6275 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
6276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6277 }
6278}
6279
6280
6281/**
6282 * @opcode 0xab
6283 */
6284FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
6285{
6286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6287
6288 /*
6289 * Use the C implementation if a repeat prefix is encountered.
6290 */
6291 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6292 {
6293 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
6294 switch (pVCpu->iem.s.enmEffOpSize)
6295 {
6296 case IEMMODE_16BIT:
6297 switch (pVCpu->iem.s.enmEffAddrMode)
6298 {
6299 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
6300 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
6301 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
6302 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6303 }
6304 break;
6305 case IEMMODE_32BIT:
6306 switch (pVCpu->iem.s.enmEffAddrMode)
6307 {
6308 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
6309 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
6310 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
6311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6312 }
6313 case IEMMODE_64BIT:
6314 switch (pVCpu->iem.s.enmEffAddrMode)
6315 {
6316 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
6317 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
6318 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
6319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6320 }
6321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6322 }
6323 }
6324 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
6325
6326 /*
6327 * Annoying double switch here.
6328 * Using ugly macro for implementing the cases, sharing it with stosb.
6329 */
6330 switch (pVCpu->iem.s.enmEffOpSize)
6331 {
6332 case IEMMODE_16BIT:
6333 switch (pVCpu->iem.s.enmEffAddrMode)
6334 {
6335 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
6336 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
6337 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
6338 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6339 }
6340 break;
6341
6342 case IEMMODE_32BIT:
6343 switch (pVCpu->iem.s.enmEffAddrMode)
6344 {
6345 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
6346 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
6347 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
6348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6349 }
6350 break;
6351
6352 case IEMMODE_64BIT:
6353 switch (pVCpu->iem.s.enmEffAddrMode)
6354 {
6355 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6356 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
6357 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
6358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6359 }
6360 break;
6361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6362 }
6363}
6364
6365#undef IEM_STOS_CASE
6366
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
/* Expands to a complete MC block implementing one LODS flavour: loads
   ValBits from iEffSeg:[xSI] (AddrBits-wide address, zero-extended to
   64 bits) into the low ValBits of xAX, then decrements or increments xSI
   by the operand size in bytes according to EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
6382
6383/**
6384 * @opcode 0xac
6385 */
6386FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
6387{
6388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6389
6390 /*
6391 * Use the C implementation if a repeat prefix is encountered.
6392 */
6393 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6394 {
6395 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
6396 switch (pVCpu->iem.s.enmEffAddrMode)
6397 {
6398 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
6399 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
6400 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
6401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6402 }
6403 }
6404 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
6405
6406 /*
6407 * Sharing case implementation with stos[wdq] below.
6408 */
6409 switch (pVCpu->iem.s.enmEffAddrMode)
6410 {
6411 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
6412 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
6413 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
6414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6415 }
6416}
6417
6418
6419/**
6420 * @opcode 0xad
6421 */
6422FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
6423{
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425
6426 /*
6427 * Use the C implementation if a repeat prefix is encountered.
6428 */
6429 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6430 {
6431 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
6432 switch (pVCpu->iem.s.enmEffOpSize)
6433 {
6434 case IEMMODE_16BIT:
6435 switch (pVCpu->iem.s.enmEffAddrMode)
6436 {
6437 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
6438 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
6439 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
6440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6441 }
6442 break;
6443 case IEMMODE_32BIT:
6444 switch (pVCpu->iem.s.enmEffAddrMode)
6445 {
6446 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
6447 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
6448 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
6449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6450 }
6451 case IEMMODE_64BIT:
6452 switch (pVCpu->iem.s.enmEffAddrMode)
6453 {
6454 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
6455 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
6456 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
6457 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6458 }
6459 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6460 }
6461 }
6462 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
6463
6464 /*
6465 * Annoying double switch here.
6466 * Using ugly macro for implementing the cases, sharing it with lodsb.
6467 */
6468 switch (pVCpu->iem.s.enmEffOpSize)
6469 {
6470 case IEMMODE_16BIT:
6471 switch (pVCpu->iem.s.enmEffAddrMode)
6472 {
6473 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
6474 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
6475 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
6476 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6477 }
6478 break;
6479
6480 case IEMMODE_32BIT:
6481 switch (pVCpu->iem.s.enmEffAddrMode)
6482 {
6483 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
6484 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
6485 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
6486 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6487 }
6488 break;
6489
6490 case IEMMODE_64BIT:
6491 switch (pVCpu->iem.s.enmEffAddrMode)
6492 {
6493 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6494 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
6495 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
6496 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6497 }
6498 break;
6499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6500 }
6501}
6502
6503#undef IEM_LODS_CASE
6504
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
/* Expands to a complete MC block implementing one SCAS flavour: compares
   the low ValBits of xAX against the value at ES:[xDI] via the CMP
   assembly helper (which updates EFLAGS only; xAX is referenced, not
   written), then decrements or increments xDI by the operand size in
   bytes according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
6526
6527/**
6528 * @opcode 0xae
6529 */
6530FNIEMOP_DEF(iemOp_scasb_AL_Xb)
6531{
6532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6533
6534 /*
6535 * Use the C implementation if a repeat prefix is encountered.
6536 */
6537 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6538 {
6539 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
6540 switch (pVCpu->iem.s.enmEffAddrMode)
6541 {
6542 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
6543 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
6544 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
6545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6546 }
6547 }
6548 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6549 {
6550 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
6551 switch (pVCpu->iem.s.enmEffAddrMode)
6552 {
6553 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
6554 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
6555 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
6556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6557 }
6558 }
6559 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
6560
6561 /*
6562 * Sharing case implementation with stos[wdq] below.
6563 */
6564 switch (pVCpu->iem.s.enmEffAddrMode)
6565 {
6566 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
6567 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
6568 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
6569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6570 }
6571}
6572
6573
6574/**
6575 * @opcode 0xaf
6576 */
6577FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
6578{
6579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6580
6581 /*
6582 * Use the C implementation if a repeat prefix is encountered.
6583 */
6584 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6585 {
6586 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
6587 switch (pVCpu->iem.s.enmEffOpSize)
6588 {
6589 case IEMMODE_16BIT:
6590 switch (pVCpu->iem.s.enmEffAddrMode)
6591 {
6592 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
6593 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
6594 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
6595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6596 }
6597 break;
6598 case IEMMODE_32BIT:
6599 switch (pVCpu->iem.s.enmEffAddrMode)
6600 {
6601 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
6602 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
6603 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
6604 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6605 }
6606 case IEMMODE_64BIT:
6607 switch (pVCpu->iem.s.enmEffAddrMode)
6608 {
6609 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
6610 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
6611 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
6612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6613 }
6614 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6615 }
6616 }
6617 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6618 {
6619 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
6620 switch (pVCpu->iem.s.enmEffOpSize)
6621 {
6622 case IEMMODE_16BIT:
6623 switch (pVCpu->iem.s.enmEffAddrMode)
6624 {
6625 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
6626 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
6627 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
6628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6629 }
6630 break;
6631 case IEMMODE_32BIT:
6632 switch (pVCpu->iem.s.enmEffAddrMode)
6633 {
6634 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
6635 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
6636 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
6637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6638 }
6639 case IEMMODE_64BIT:
6640 switch (pVCpu->iem.s.enmEffAddrMode)
6641 {
6642 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
6643 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
6644 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
6645 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6646 }
6647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6648 }
6649 }
6650 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
6651
6652 /*
6653 * Annoying double switch here.
6654 * Using ugly macro for implementing the cases, sharing it with scasb.
6655 */
6656 switch (pVCpu->iem.s.enmEffOpSize)
6657 {
6658 case IEMMODE_16BIT:
6659 switch (pVCpu->iem.s.enmEffAddrMode)
6660 {
6661 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
6662 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
6663 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
6664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6665 }
6666 break;
6667
6668 case IEMMODE_32BIT:
6669 switch (pVCpu->iem.s.enmEffAddrMode)
6670 {
6671 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
6672 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
6673 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
6674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6675 }
6676 break;
6677
6678 case IEMMODE_64BIT:
6679 switch (pVCpu->iem.s.enmEffAddrMode)
6680 {
6681 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6682 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
6683 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
6684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6685 }
6686 break;
6687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6688 }
6689}
6690
6691#undef IEM_SCAS_CASE
6692
6693/**
6694 * Common 'mov r8, imm8' helper.
6695 */
6696FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
6697{
6698 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6700
6701 IEM_MC_BEGIN(0, 1);
6702 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
6703 IEM_MC_STORE_GREG_U8(iReg, u8Value);
6704 IEM_MC_ADVANCE_RIP_AND_FINISH();
6705 IEM_MC_END();
6706}
6707
6708
6709/**
6710 * @opcode 0xb0
6711 */
6712FNIEMOP_DEF(iemOp_mov_AL_Ib)
6713{
6714 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
6715 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6716}
6717
6718
6719/**
6720 * @opcode 0xb1
6721 */
6722FNIEMOP_DEF(iemOp_CL_Ib)
6723{
6724 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
6725 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6726}
6727
6728
6729/**
6730 * @opcode 0xb2
6731 */
6732FNIEMOP_DEF(iemOp_DL_Ib)
6733{
6734 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
6735 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6736}
6737
6738
6739/**
6740 * @opcode 0xb3
6741 */
6742FNIEMOP_DEF(iemOp_BL_Ib)
6743{
6744 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
6745 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6746}
6747
6748
6749/**
6750 * @opcode 0xb4
6751 */
6752FNIEMOP_DEF(iemOp_mov_AH_Ib)
6753{
6754 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
6755 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6756}
6757
6758
6759/**
6760 * @opcode 0xb5
6761 */
6762FNIEMOP_DEF(iemOp_CH_Ib)
6763{
6764 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
6765 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6766}
6767
6768
6769/**
6770 * @opcode 0xb6
6771 */
6772FNIEMOP_DEF(iemOp_DH_Ib)
6773{
6774 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
6775 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6776}
6777
6778
6779/**
6780 * @opcode 0xb7
6781 */
6782FNIEMOP_DEF(iemOp_BH_Ib)
6783{
6784 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
6785 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6786}
6787
6788
6789/**
6790 * Common 'mov regX,immX' helper.
6791 */
6792FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
6793{
6794 switch (pVCpu->iem.s.enmEffOpSize)
6795 {
6796 case IEMMODE_16BIT:
6797 {
6798 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6800
6801 IEM_MC_BEGIN(0, 1);
6802 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
6803 IEM_MC_STORE_GREG_U16(iReg, u16Value);
6804 IEM_MC_ADVANCE_RIP_AND_FINISH();
6805 IEM_MC_END();
6806 break;
6807 }
6808
6809 case IEMMODE_32BIT:
6810 {
6811 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6813
6814 IEM_MC_BEGIN(0, 1);
6815 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
6816 IEM_MC_STORE_GREG_U32(iReg, u32Value);
6817 IEM_MC_ADVANCE_RIP_AND_FINISH();
6818 IEM_MC_END();
6819 break;
6820 }
6821 case IEMMODE_64BIT:
6822 {
6823 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
6824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6825
6826 IEM_MC_BEGIN(0, 1);
6827 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
6828 IEM_MC_STORE_GREG_U64(iReg, u64Value);
6829 IEM_MC_ADVANCE_RIP_AND_FINISH();
6830 IEM_MC_END();
6831 break;
6832 }
6833 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6834 }
6835}
6836
6837
6838/**
6839 * @opcode 0xb8
6840 */
6841FNIEMOP_DEF(iemOp_eAX_Iv)
6842{
6843 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
6844 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
6845}
6846
6847
6848/**
6849 * @opcode 0xb9
6850 */
6851FNIEMOP_DEF(iemOp_eCX_Iv)
6852{
6853 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
6854 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
6855}
6856
6857
6858/**
6859 * @opcode 0xba
6860 */
6861FNIEMOP_DEF(iemOp_eDX_Iv)
6862{
6863 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
6864 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
6865}
6866
6867
6868/**
6869 * @opcode 0xbb
6870 */
6871FNIEMOP_DEF(iemOp_eBX_Iv)
6872{
6873 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
6874 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
6875}
6876
6877
6878/**
6879 * @opcode 0xbc
6880 */
6881FNIEMOP_DEF(iemOp_eSP_Iv)
6882{
6883 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
6884 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
6885}
6886
6887
6888/**
6889 * @opcode 0xbd
6890 */
6891FNIEMOP_DEF(iemOp_eBP_Iv)
6892{
6893 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
6894 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
6895}
6896
6897
6898/**
6899 * @opcode 0xbe
6900 */
6901FNIEMOP_DEF(iemOp_eSI_Iv)
6902{
6903 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
6904 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
6905}
6906
6907
6908/**
6909 * @opcode 0xbf
6910 */
6911FNIEMOP_DEF(iemOp_eDI_Iv)
6912{
6913 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
6914 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6915}
6916
6917
6918/**
6919 * @opcode 0xc0
6920 */
6921FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
6922{
6923 IEMOP_HLP_MIN_186();
6924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6925 PCIEMOPSHIFTSIZES pImpl;
6926 switch (IEM_GET_MODRM_REG_8(bRm))
6927 {
6928 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
6929 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
6930 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
6931 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
6932 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
6933 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
6934 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
6935 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6936 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6937 }
6938 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6939
6940 if (IEM_IS_MODRM_REG_MODE(bRm))
6941 {
6942 /* register */
6943 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6944 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6945 IEM_MC_BEGIN(3, 0);
6946 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6947 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6948 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6949 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
6950 IEM_MC_REF_EFLAGS(pEFlags);
6951 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6952 IEM_MC_ADVANCE_RIP_AND_FINISH();
6953 IEM_MC_END();
6954 }
6955 else
6956 {
6957 /* memory */
6958 IEM_MC_BEGIN(3, 2);
6959 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6960 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6961 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6963
6964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6965 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6966 IEM_MC_ASSIGN(cShiftArg, cShift);
6967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6968 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6969 IEM_MC_FETCH_EFLAGS(EFlags);
6970 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6971
6972 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6973 IEM_MC_COMMIT_EFLAGS(EFlags);
6974 IEM_MC_ADVANCE_RIP_AND_FINISH();
6975 IEM_MC_END();
6976 }
6977}
6978
6979
6980/**
6981 * @opcode 0xc1
6982 */
6983FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6984{
6985 IEMOP_HLP_MIN_186();
6986 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6987 PCIEMOPSHIFTSIZES pImpl;
6988 switch (IEM_GET_MODRM_REG_8(bRm))
6989 {
6990 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6991 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6992 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6993 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6994 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6995 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6996 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6997 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6998 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6999 }
7000 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7001
7002 if (IEM_IS_MODRM_REG_MODE(bRm))
7003 {
7004 /* register */
7005 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7007 switch (pVCpu->iem.s.enmEffOpSize)
7008 {
7009 case IEMMODE_16BIT:
7010 IEM_MC_BEGIN(3, 0);
7011 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7012 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7013 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7014 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7015 IEM_MC_REF_EFLAGS(pEFlags);
7016 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7017 IEM_MC_ADVANCE_RIP_AND_FINISH();
7018 IEM_MC_END();
7019 break;
7020
7021 case IEMMODE_32BIT:
7022 IEM_MC_BEGIN(3, 0);
7023 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7024 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7025 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7026 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7027 IEM_MC_REF_EFLAGS(pEFlags);
7028 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7029 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7030 IEM_MC_ADVANCE_RIP_AND_FINISH();
7031 IEM_MC_END();
7032 break;
7033
7034 case IEMMODE_64BIT:
7035 IEM_MC_BEGIN(3, 0);
7036 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7037 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
7038 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7039 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7040 IEM_MC_REF_EFLAGS(pEFlags);
7041 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7042 IEM_MC_ADVANCE_RIP_AND_FINISH();
7043 IEM_MC_END();
7044 break;
7045
7046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7047 }
7048 }
7049 else
7050 {
7051 /* memory */
7052 switch (pVCpu->iem.s.enmEffOpSize)
7053 {
7054 case IEMMODE_16BIT:
7055 IEM_MC_BEGIN(3, 2);
7056 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7057 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7058 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7060
7061 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7062 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7063 IEM_MC_ASSIGN(cShiftArg, cShift);
7064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7065 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7066 IEM_MC_FETCH_EFLAGS(EFlags);
7067 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7068
7069 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7070 IEM_MC_COMMIT_EFLAGS(EFlags);
7071 IEM_MC_ADVANCE_RIP_AND_FINISH();
7072 IEM_MC_END();
7073 break;
7074
7075 case IEMMODE_32BIT:
7076 IEM_MC_BEGIN(3, 2);
7077 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7078 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7079 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7081
7082 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7083 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7084 IEM_MC_ASSIGN(cShiftArg, cShift);
7085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7086 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7087 IEM_MC_FETCH_EFLAGS(EFlags);
7088 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7089
7090 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7091 IEM_MC_COMMIT_EFLAGS(EFlags);
7092 IEM_MC_ADVANCE_RIP_AND_FINISH();
7093 IEM_MC_END();
7094 break;
7095
7096 case IEMMODE_64BIT:
7097 IEM_MC_BEGIN(3, 2);
7098 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7099 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7100 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7101 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7102
7103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7104 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
7105 IEM_MC_ASSIGN(cShiftArg, cShift);
7106 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7107 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7108 IEM_MC_FETCH_EFLAGS(EFlags);
7109 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7110
7111 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7112 IEM_MC_COMMIT_EFLAGS(EFlags);
7113 IEM_MC_ADVANCE_RIP_AND_FINISH();
7114 IEM_MC_END();
7115 break;
7116
7117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7118 }
7119 }
7120}
7121
7122
7123/**
7124 * @opcode 0xc2
7125 */
7126FNIEMOP_DEF(iemOp_retn_Iw)
7127{
7128 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
7129 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7130 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7131 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7132 switch (pVCpu->iem.s.enmEffOpSize)
7133 {
7134 case IEMMODE_16BIT:
7135 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_16, u16Imm);
7136 case IEMMODE_32BIT:
7137 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_32, u16Imm);
7138 case IEMMODE_64BIT:
7139 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_retn_iw_64, u16Imm);
7140 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7141 }
7142}
7143
7144
7145/**
7146 * @opcode 0xc3
7147 */
7148FNIEMOP_DEF(iemOp_retn)
7149{
7150 IEMOP_MNEMONIC(retn, "retn");
7151 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
7152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7153 switch (pVCpu->iem.s.enmEffOpSize)
7154 {
7155 case IEMMODE_16BIT:
7156 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_16);
7157 case IEMMODE_32BIT:
7158 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_32);
7159 case IEMMODE_64BIT:
7160 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_retn_64);
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163}
7164
7165
7166/**
7167 * @opcode 0xc4
7168 */
7169FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
7170{
7171 /* The LDS instruction is invalid 64-bit mode. In legacy and
7172 compatability mode it is invalid with MOD=3.
7173 The use as a VEX prefix is made possible by assigning the inverted
7174 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
7175 outside of 64-bit mode. VEX is not available in real or v86 mode. */
7176 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7177 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
7178 || IEM_IS_MODRM_REG_MODE(bRm) )
7179 {
7180 IEMOP_MNEMONIC(vex3_prefix, "vex3");
7181 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
7182 {
7183 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7184 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7185 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
7186 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7187 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7188 if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
7189 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
7190 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7191 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
7192 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
7193 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
7194 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
7195 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
7196
7197 switch (bRm & 0x1f)
7198 {
7199 case 1: /* 0x0f lead opcode byte. */
7200#ifdef IEM_WITH_VEX
7201 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7202#else
7203 IEMOP_BITCH_ABOUT_STUB();
7204 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7205#endif
7206
7207 case 2: /* 0x0f 0x38 lead opcode bytes. */
7208#ifdef IEM_WITH_VEX
7209 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7210#else
7211 IEMOP_BITCH_ABOUT_STUB();
7212 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7213#endif
7214
7215 case 3: /* 0x0f 0x3a lead opcode bytes. */
7216#ifdef IEM_WITH_VEX
7217 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7218#else
7219 IEMOP_BITCH_ABOUT_STUB();
7220 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7221#endif
7222
7223 default:
7224 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
7225 return IEMOP_RAISE_INVALID_OPCODE();
7226 }
7227 }
7228 Log(("VEX3: AVX support disabled!\n"));
7229 return IEMOP_RAISE_INVALID_OPCODE();
7230 }
7231
7232 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
7233 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
7234}
7235
7236
7237/**
7238 * @opcode 0xc5
7239 */
7240FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
7241{
7242 /* The LES instruction is invalid 64-bit mode. In legacy and
7243 compatability mode it is invalid with MOD=3.
7244 The use as a VEX prefix is made possible by assigning the inverted
7245 REX.R to the top MOD bit, and the top bit in the inverted register
7246 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
7247 to accessing registers 0..7 in this VEX form. */
7248 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7249 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
7250 || IEM_IS_MODRM_REG_MODE(bRm))
7251 {
7252 IEMOP_MNEMONIC(vex2_prefix, "vex2");
7253 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
7254 {
7255 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
7256 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
7257 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
7258 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
7259 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
7260 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
7261 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
7262 pVCpu->iem.s.idxPrefix = bRm & 0x3;
7263
7264#ifdef IEM_WITH_VEX
7265 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
7266#else
7267 IEMOP_BITCH_ABOUT_STUB();
7268 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
7269#endif
7270 }
7271
7272 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
7273 Log(("VEX2: AVX support disabled!\n"));
7274 return IEMOP_RAISE_INVALID_OPCODE();
7275 }
7276
7277 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
7278 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
7279}
7280
7281
7282/**
7283 * @opcode 0xc6
7284 */
7285FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
7286{
7287 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7288 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7289 return IEMOP_RAISE_INVALID_OPCODE();
7290 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
7291
7292 if (IEM_IS_MODRM_REG_MODE(bRm))
7293 {
7294 /* register access */
7295 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7297 IEM_MC_BEGIN(0, 0);
7298 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
7299 IEM_MC_ADVANCE_RIP_AND_FINISH();
7300 IEM_MC_END();
7301 }
7302 else
7303 {
7304 /* memory access. */
7305 IEM_MC_BEGIN(0, 1);
7306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7307 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
7308 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7310 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
7311 IEM_MC_ADVANCE_RIP_AND_FINISH();
7312 IEM_MC_END();
7313 }
7314}
7315
7316
7317/**
7318 * @opcode 0xc7
7319 */
7320FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
7321{
7322 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7323 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
7324 return IEMOP_RAISE_INVALID_OPCODE();
7325 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
7326
7327 if (IEM_IS_MODRM_REG_MODE(bRm))
7328 {
7329 /* register access */
7330 switch (pVCpu->iem.s.enmEffOpSize)
7331 {
7332 case IEMMODE_16BIT:
7333 IEM_MC_BEGIN(0, 0);
7334 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7336 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
7337 IEM_MC_ADVANCE_RIP_AND_FINISH();
7338 IEM_MC_END();
7339 break;
7340
7341 case IEMMODE_32BIT:
7342 IEM_MC_BEGIN(0, 0);
7343 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7345 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
7346 IEM_MC_ADVANCE_RIP_AND_FINISH();
7347 IEM_MC_END();
7348 break;
7349
7350 case IEMMODE_64BIT:
7351 IEM_MC_BEGIN(0, 0);
7352 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
7353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7354 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
7355 IEM_MC_ADVANCE_RIP_AND_FINISH();
7356 IEM_MC_END();
7357 break;
7358
7359 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7360 }
7361 }
7362 else
7363 {
7364 /* memory access. */
7365 switch (pVCpu->iem.s.enmEffOpSize)
7366 {
7367 case IEMMODE_16BIT:
7368 IEM_MC_BEGIN(0, 1);
7369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
7371 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7373 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
7374 IEM_MC_ADVANCE_RIP_AND_FINISH();
7375 IEM_MC_END();
7376 break;
7377
7378 case IEMMODE_32BIT:
7379 IEM_MC_BEGIN(0, 1);
7380 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7381 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
7382 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7383 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7384 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
7385 IEM_MC_ADVANCE_RIP_AND_FINISH();
7386 IEM_MC_END();
7387 break;
7388
7389 case IEMMODE_64BIT:
7390 IEM_MC_BEGIN(0, 1);
7391 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
7393 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
7394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7395 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
7396 IEM_MC_ADVANCE_RIP_AND_FINISH();
7397 IEM_MC_END();
7398 break;
7399
7400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7401 }
7402 }
7403}
7404
7405
7406
7407
7408/**
7409 * @opcode 0xc8
7410 */
7411FNIEMOP_DEF(iemOp_enter_Iw_Ib)
7412{
7413 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
7414 IEMOP_HLP_MIN_186();
7415 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7416 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
7417 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
7418 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7419 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
7420}
7421
7422
7423/**
7424 * @opcode 0xc9
7425 */
7426FNIEMOP_DEF(iemOp_leave)
7427{
7428 IEMOP_MNEMONIC(leave, "leave");
7429 IEMOP_HLP_MIN_186();
7430 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
7431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7432 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
7433}
7434
7435
7436/**
7437 * @opcode 0xca
7438 */
7439FNIEMOP_DEF(iemOp_retf_Iw)
7440{
7441 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
7442 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7444 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
7445}
7446
7447
7448/**
7449 * @opcode 0xcb
7450 */
7451FNIEMOP_DEF(iemOp_retf)
7452{
7453 IEMOP_MNEMONIC(retf, "retf");
7454 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7455 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
7456}
7457
7458
7459/**
7460 * @opcode 0xcc
7461 */
7462FNIEMOP_DEF(iemOp_int3)
7463{
7464 IEMOP_MNEMONIC(int3, "int3");
7465 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7466 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
7467}
7468
7469
7470/**
7471 * @opcode 0xcd
7472 */
7473FNIEMOP_DEF(iemOp_int_Ib)
7474{
7475 IEMOP_MNEMONIC(int_Ib, "int Ib");
7476 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
7477 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7478 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
7479}
7480
7481
7482/**
7483 * @opcode 0xce
7484 */
7485FNIEMOP_DEF(iemOp_into)
7486{
7487 IEMOP_MNEMONIC(into, "into");
7488 IEMOP_HLP_NO_64BIT();
7489
7490 IEM_MC_BEGIN(2, 0);
7491 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
7492 IEM_MC_ARG_CONST(IEMINT, enmInt, /*=*/ IEMINT_INTO, 1);
7493 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
7494 IEM_MC_END();
7495}
7496
7497
7498/**
7499 * @opcode 0xcf
7500 */
7501FNIEMOP_DEF(iemOp_iret)
7502{
7503 IEMOP_MNEMONIC(iret, "iret");
7504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7505 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
7506}
7507
7508
7509/**
7510 * @opcode 0xd0
7511 */
7512FNIEMOP_DEF(iemOp_Grp2_Eb_1)
7513{
7514 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7515 PCIEMOPSHIFTSIZES pImpl;
7516 switch (IEM_GET_MODRM_REG_8(bRm))
7517 {
7518 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
7519 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
7520 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
7521 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
7522 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
7523 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
7524 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
7525 case 6: return IEMOP_RAISE_INVALID_OPCODE();
7526 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
7527 }
7528 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7529
7530 if (IEM_IS_MODRM_REG_MODE(bRm))
7531 {
7532 /* register */
7533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7534 IEM_MC_BEGIN(3, 0);
7535 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7536 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
7537 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7538 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7539 IEM_MC_REF_EFLAGS(pEFlags);
7540 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7541 IEM_MC_ADVANCE_RIP_AND_FINISH();
7542 IEM_MC_END();
7543 }
7544 else
7545 {
7546 /* memory */
7547 IEM_MC_BEGIN(3, 2);
7548 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7549 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
7550 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7552
7553 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7555 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7556 IEM_MC_FETCH_EFLAGS(EFlags);
7557 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7558
7559 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7560 IEM_MC_COMMIT_EFLAGS(EFlags);
7561 IEM_MC_ADVANCE_RIP_AND_FINISH();
7562 IEM_MC_END();
7563 }
7564}
7565
7566
7567
7568/**
7569 * @opcode 0xd1
7570 */
7571FNIEMOP_DEF(iemOp_Grp2_Ev_1)
7572{
7573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7574 PCIEMOPSHIFTSIZES pImpl;
7575 switch (IEM_GET_MODRM_REG_8(bRm))
7576 {
7577 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
7578 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
7579 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
7580 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
7581 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
7582 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
7583 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
7584 case 6: return IEMOP_RAISE_INVALID_OPCODE();
7585 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
7586 }
7587 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7588
7589 if (IEM_IS_MODRM_REG_MODE(bRm))
7590 {
7591 /* register */
7592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7593 switch (pVCpu->iem.s.enmEffOpSize)
7594 {
7595 case IEMMODE_16BIT:
7596 IEM_MC_BEGIN(3, 0);
7597 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7598 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7599 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7600 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7601 IEM_MC_REF_EFLAGS(pEFlags);
7602 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7603 IEM_MC_ADVANCE_RIP_AND_FINISH();
7604 IEM_MC_END();
7605 break;
7606
7607 case IEMMODE_32BIT:
7608 IEM_MC_BEGIN(3, 0);
7609 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7610 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7611 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7612 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7613 IEM_MC_REF_EFLAGS(pEFlags);
7614 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7615 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7616 IEM_MC_ADVANCE_RIP_AND_FINISH();
7617 IEM_MC_END();
7618 break;
7619
7620 case IEMMODE_64BIT:
7621 IEM_MC_BEGIN(3, 0);
7622 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7623 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7624 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7625 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7626 IEM_MC_REF_EFLAGS(pEFlags);
7627 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7628 IEM_MC_ADVANCE_RIP_AND_FINISH();
7629 IEM_MC_END();
7630 break;
7631
7632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7633 }
7634 }
7635 else
7636 {
7637 /* memory */
7638 switch (pVCpu->iem.s.enmEffOpSize)
7639 {
7640 case IEMMODE_16BIT:
7641 IEM_MC_BEGIN(3, 2);
7642 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7643 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7644 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7646
7647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7649 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7650 IEM_MC_FETCH_EFLAGS(EFlags);
7651 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7652
7653 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7654 IEM_MC_COMMIT_EFLAGS(EFlags);
7655 IEM_MC_ADVANCE_RIP_AND_FINISH();
7656 IEM_MC_END();
7657 break;
7658
7659 case IEMMODE_32BIT:
7660 IEM_MC_BEGIN(3, 2);
7661 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7662 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7663 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7664 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7665
7666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7668 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7669 IEM_MC_FETCH_EFLAGS(EFlags);
7670 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7671
7672 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7673 IEM_MC_COMMIT_EFLAGS(EFlags);
7674 IEM_MC_ADVANCE_RIP_AND_FINISH();
7675 IEM_MC_END();
7676 break;
7677
7678 case IEMMODE_64BIT:
7679 IEM_MC_BEGIN(3, 2);
7680 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7681 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
7682 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7683 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7684
7685 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7687 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7688 IEM_MC_FETCH_EFLAGS(EFlags);
7689 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7690
7691 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7692 IEM_MC_COMMIT_EFLAGS(EFlags);
7693 IEM_MC_ADVANCE_RIP_AND_FINISH();
7694 IEM_MC_END();
7695 break;
7696
7697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7698 }
7699 }
7700}
7701
7702
7703/**
7704 * @opcode 0xd2
7705 */
7706FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
7707{
7708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7709 PCIEMOPSHIFTSIZES pImpl;
7710 switch (IEM_GET_MODRM_REG_8(bRm))
7711 {
7712 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
7713 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
7714 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
7715 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
7716 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
7717 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
7718 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
7719 case 6: return IEMOP_RAISE_INVALID_OPCODE();
7720 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
7721 }
7722 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7723
7724 if (IEM_IS_MODRM_REG_MODE(bRm))
7725 {
7726 /* register */
7727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7728 IEM_MC_BEGIN(3, 0);
7729 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7730 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7731 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7732 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7733 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7734 IEM_MC_REF_EFLAGS(pEFlags);
7735 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7736 IEM_MC_ADVANCE_RIP_AND_FINISH();
7737 IEM_MC_END();
7738 }
7739 else
7740 {
7741 /* memory */
7742 IEM_MC_BEGIN(3, 2);
7743 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
7744 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7745 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7747
7748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7750 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7751 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7752 IEM_MC_FETCH_EFLAGS(EFlags);
7753 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
7754
7755 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
7756 IEM_MC_COMMIT_EFLAGS(EFlags);
7757 IEM_MC_ADVANCE_RIP_AND_FINISH();
7758 IEM_MC_END();
7759 }
7760}
7761
7762
7763/**
7764 * @opcode 0xd3
7765 */
7766FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
7767{
7768 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7769 PCIEMOPSHIFTSIZES pImpl;
7770 switch (IEM_GET_MODRM_REG_8(bRm))
7771 {
7772 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
7773 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
7774 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
7775 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
7776 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
7777 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
7778 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
7779 case 6: return IEMOP_RAISE_INVALID_OPCODE();
7780 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
7781 }
7782 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
7783
7784 if (IEM_IS_MODRM_REG_MODE(bRm))
7785 {
7786 /* register */
7787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7788 switch (pVCpu->iem.s.enmEffOpSize)
7789 {
7790 case IEMMODE_16BIT:
7791 IEM_MC_BEGIN(3, 0);
7792 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7793 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7794 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7795 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7796 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7797 IEM_MC_REF_EFLAGS(pEFlags);
7798 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7799 IEM_MC_ADVANCE_RIP_AND_FINISH();
7800 IEM_MC_END();
7801 break;
7802
7803 case IEMMODE_32BIT:
7804 IEM_MC_BEGIN(3, 0);
7805 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7806 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7807 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7808 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7809 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7810 IEM_MC_REF_EFLAGS(pEFlags);
7811 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7812 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
7813 IEM_MC_ADVANCE_RIP_AND_FINISH();
7814 IEM_MC_END();
7815 break;
7816
7817 case IEMMODE_64BIT:
7818 IEM_MC_BEGIN(3, 0);
7819 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7820 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7821 IEM_MC_ARG(uint32_t *, pEFlags, 2);
7822 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
7823 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7824 IEM_MC_REF_EFLAGS(pEFlags);
7825 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7826 IEM_MC_ADVANCE_RIP_AND_FINISH();
7827 IEM_MC_END();
7828 break;
7829
7830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7831 }
7832 }
7833 else
7834 {
7835 /* memory */
7836 switch (pVCpu->iem.s.enmEffOpSize)
7837 {
7838 case IEMMODE_16BIT:
7839 IEM_MC_BEGIN(3, 2);
7840 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
7841 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7842 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7843 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7844
7845 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7847 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7848 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7849 IEM_MC_FETCH_EFLAGS(EFlags);
7850 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
7851
7852 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
7853 IEM_MC_COMMIT_EFLAGS(EFlags);
7854 IEM_MC_ADVANCE_RIP_AND_FINISH();
7855 IEM_MC_END();
7856 break;
7857
7858 case IEMMODE_32BIT:
7859 IEM_MC_BEGIN(3, 2);
7860 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
7861 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7862 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7864
7865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7867 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7868 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7869 IEM_MC_FETCH_EFLAGS(EFlags);
7870 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
7871
7872 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
7873 IEM_MC_COMMIT_EFLAGS(EFlags);
7874 IEM_MC_ADVANCE_RIP_AND_FINISH();
7875 IEM_MC_END();
7876 break;
7877
7878 case IEMMODE_64BIT:
7879 IEM_MC_BEGIN(3, 2);
7880 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
7881 IEM_MC_ARG(uint8_t, cShiftArg, 1);
7882 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
7883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7884
7885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7887 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
7888 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
7889 IEM_MC_FETCH_EFLAGS(EFlags);
7890 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
7891
7892 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
7893 IEM_MC_COMMIT_EFLAGS(EFlags);
7894 IEM_MC_ADVANCE_RIP_AND_FINISH();
7895 IEM_MC_END();
7896 break;
7897
7898 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7899 }
7900 }
7901}
7902
7903/**
7904 * @opcode 0xd4
7905 */
7906FNIEMOP_DEF(iemOp_aam_Ib)
7907{
7908 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
7909 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
7910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7911 IEMOP_HLP_NO_64BIT();
7912 if (!bImm)
7913 return IEMOP_RAISE_DIVIDE_ERROR();
7914 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
7915}
7916
7917
7918/**
7919 * @opcode 0xd5
7920 */
7921FNIEMOP_DEF(iemOp_aad_Ib)
7922{
7923 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
7924 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
7925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7926 IEMOP_HLP_NO_64BIT();
7927 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
7928}
7929
7930
7931/**
7932 * @opcode 0xd6
7933 */
7934FNIEMOP_DEF(iemOp_salc)
7935{
7936 IEMOP_MNEMONIC(salc, "salc");
7937 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7938 IEMOP_HLP_NO_64BIT();
7939
7940 IEM_MC_BEGIN(0, 0);
7941 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7942 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
7943 } IEM_MC_ELSE() {
7944 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
7945 } IEM_MC_ENDIF();
7946 IEM_MC_ADVANCE_RIP_AND_FINISH();
7947 IEM_MC_END();
7948}
7949
7950
7951/**
7952 * @opcode 0xd7
7953 */
7954FNIEMOP_DEF(iemOp_xlat)
7955{
7956 IEMOP_MNEMONIC(xlat, "xlat");
7957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7958 switch (pVCpu->iem.s.enmEffAddrMode)
7959 {
7960 case IEMMODE_16BIT:
7961 IEM_MC_BEGIN(2, 0);
7962 IEM_MC_LOCAL(uint8_t, u8Tmp);
7963 IEM_MC_LOCAL(uint16_t, u16Addr);
7964 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
7965 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
7966 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
7967 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7968 IEM_MC_ADVANCE_RIP_AND_FINISH();
7969 IEM_MC_END();
7970 break;
7971
7972 case IEMMODE_32BIT:
7973 IEM_MC_BEGIN(2, 0);
7974 IEM_MC_LOCAL(uint8_t, u8Tmp);
7975 IEM_MC_LOCAL(uint32_t, u32Addr);
7976 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
7977 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
7978 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
7979 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7980 IEM_MC_ADVANCE_RIP_AND_FINISH();
7981 IEM_MC_END();
7982 break;
7983
7984 case IEMMODE_64BIT:
7985 IEM_MC_BEGIN(2, 0);
7986 IEM_MC_LOCAL(uint8_t, u8Tmp);
7987 IEM_MC_LOCAL(uint64_t, u64Addr);
7988 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
7989 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
7990 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
7991 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7992 IEM_MC_ADVANCE_RIP_AND_FINISH();
7993 IEM_MC_END();
7994 break;
7995
7996 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7997 }
7998}
7999
8000
8001/**
8002 * Common worker for FPU instructions working on ST0 and STn, and storing the
8003 * result in ST0.
8004 *
8005 * @param bRm Mod R/M byte.
8006 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8007 */
8008FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8009{
8010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8011
8012 IEM_MC_BEGIN(3, 1);
8013 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8014 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8015 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8016 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8017
8018 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8019 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8020 IEM_MC_PREPARE_FPU_USAGE();
8021 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8022 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8023 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8024 } IEM_MC_ELSE() {
8025 IEM_MC_FPU_STACK_UNDERFLOW(0);
8026 } IEM_MC_ENDIF();
8027 IEM_MC_ADVANCE_RIP_AND_FINISH();
8028
8029 IEM_MC_END();
8030}
8031
8032
8033/**
8034 * Common worker for FPU instructions working on ST0 and STn, and only affecting
8035 * flags.
8036 *
8037 * @param bRm Mod R/M byte.
8038 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8039 */
8040FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8041{
8042 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8043
8044 IEM_MC_BEGIN(3, 1);
8045 IEM_MC_LOCAL(uint16_t, u16Fsw);
8046 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8047 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8048 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8049
8050 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8051 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8052 IEM_MC_PREPARE_FPU_USAGE();
8053 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8054 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8055 IEM_MC_UPDATE_FSW(u16Fsw);
8056 } IEM_MC_ELSE() {
8057 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
8058 } IEM_MC_ENDIF();
8059 IEM_MC_ADVANCE_RIP_AND_FINISH();
8060
8061 IEM_MC_END();
8062}
8063
8064
8065/**
8066 * Common worker for FPU instructions working on ST0 and STn, only affecting
8067 * flags, and popping when done.
8068 *
8069 * @param bRm Mod R/M byte.
8070 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8071 */
8072FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8073{
8074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8075
8076 IEM_MC_BEGIN(3, 1);
8077 IEM_MC_LOCAL(uint16_t, u16Fsw);
8078 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8079 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8080 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8081
8082 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8083 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8084 IEM_MC_PREPARE_FPU_USAGE();
8085 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
8086 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8087 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
8088 } IEM_MC_ELSE() {
8089 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
8090 } IEM_MC_ENDIF();
8091 IEM_MC_ADVANCE_RIP_AND_FINISH();
8092
8093 IEM_MC_END();
8094}
8095
8096
/** Opcode 0xd8 11/0. FADD ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
8103
8104
/** Opcode 0xd8 11/1. FMUL ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
8111
8112
/** Opcode 0xd8 11/2. FCOM ST(0),ST(i) - compare, only FSW flags affected. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
8119
8120
/** Opcode 0xd8 11/3. FCOMP ST(0),ST(i) - same as FCOM but pops afterwards
 *  (shares the fcom assembly worker). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
8127
8128
/** Opcode 0xd8 11/4. FSUB ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
8135
8136
/** Opcode 0xd8 11/5. FSUBR ST(0),ST(i) - reversed subtraction, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
8143
8144
/** Opcode 0xd8 11/6. FDIV ST(0),ST(i) - result stored in ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
8151
8152
/** Opcode 0xd8 11/7. FDIVR ST(0),ST(i) - reversed division, result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
8159
8160
8161/**
8162 * Common worker for FPU instructions working on ST0 and an m32r, and storing
8163 * the result in ST0.
8164 *
8165 * @param bRm Mod R/M byte.
8166 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8167 */
8168FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
8169{
8170 IEM_MC_BEGIN(3, 3);
8171 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8172 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8173 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
8174 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8175 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8176 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
8177
8178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8180
8181 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8182 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8183 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8184
8185 IEM_MC_PREPARE_FPU_USAGE();
8186 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
8187 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
8188 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8189 } IEM_MC_ELSE() {
8190 IEM_MC_FPU_STACK_UNDERFLOW(0);
8191 } IEM_MC_ENDIF();
8192 IEM_MC_ADVANCE_RIP_AND_FINISH();
8193
8194 IEM_MC_END();
8195}
8196
8197
/** Opcode 0xd8 !11/0. FADD ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
8204
8205
/** Opcode 0xd8 !11/1. FMUL ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
8212
8213
/** Opcode 0xd8 !11/2. FCOM ST(0),m32real - compare only, FSW flags affected.
 *  Open-coded (not using iemOpHlpFpu_st0_m32r) because only the FSW is
 *  written back, no register result. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variant also records FPUDP/FPUDS. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8245
8246
/** Opcode 0xd8 !11/3. FCOMP ST(0),m32real - like FCOM m32r but pops the
 *  stack after updating the FSW (also pops on underflow). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8278
8279
/** Opcode 0xd8 !11/4. FSUB ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
8286
8287
/** Opcode 0xd8 !11/5. FSUBR ST(0),m32real - reversed subtraction. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
8294
8295
/** Opcode 0xd8 !11/6. FDIV ST(0),m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
8302
8303
/** Opcode 0xd8 !11/7. FDIVR ST(0),m32real - reversed division. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
8310
8311
8312/**
8313 * @opcode 0xd8
8314 */
8315FNIEMOP_DEF(iemOp_EscF0)
8316{
8317 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8318 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
8319
8320 if (IEM_IS_MODRM_REG_MODE(bRm))
8321 {
8322 switch (IEM_GET_MODRM_REG_8(bRm))
8323 {
8324 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
8325 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
8326 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
8327 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
8328 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
8329 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
8330 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
8331 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
8332 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8333 }
8334 }
8335 else
8336 {
8337 switch (IEM_GET_MODRM_REG_8(bRm))
8338 {
8339 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
8340 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
8341 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
8342 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
8343 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
8344 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
8345 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
8346 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
8347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8348 }
8349 }
8350}
8351
8352
/** Opcode 0xd9 /0 mem32real
 * Converts the m32real to 80-bit and pushes it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 (the register that becomes the new TOP) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8384
8385
/** Opcode 0xd9 !11/2 mem32real
 * Stores ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so a #PF is raised before any FPU
       state is modified. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with #IA masked, the indefinite QNaN is stored. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8419
8420
/** Opcode 0xd9 !11/3
 * FSTP m32real - like FST m32r (see iemOp_fst_m32r) but pops the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Empty ST0: with #IA masked, store the indefinite QNaN and pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8454
8455
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - loads the FPU environment; deferred to a C
 * implementation (iemCImpl_fldenv) since it is operand-size dependent. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
8472
8473
8474/** Opcode 0xd9 !11/5 */
8475FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
8476{
8477 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
8478 IEM_MC_BEGIN(1, 1);
8479 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8480 IEM_MC_ARG(uint16_t, u16Fsw, 0);
8481 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8482 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8483 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8484 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8485 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8486 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
8487 IEM_MC_END();
8488}
8489
8490
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - stores the FPU environment; deferred to a C
 * implementation (iemCImpl_fnstenv) since it is operand-size dependent. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG(uint8_t,                 iEffSeg,                                        1);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                    2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
8507
8508
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - stores the FPU control word; simple enough to handle
 * entirely in micro-code (no C implementation needed). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8525
8526
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does no arithmetic; still subject to \#NM/\#MF checks and updates
 * FOP/FPUIP like other FPU instructions. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
8543
8544
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - pushes a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        /* Wrap the source value in a result with a clean FSW and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8570
8571
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchanges ST0 and ST(i).  The underflow case (either register
 * empty) is handled by a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* ST(i)'s old value goes to ST0 (with C1 set), ST0's old value to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8600
8601
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copies ST0 to ST(i) and pops.  ST(i) == ST(0) is special-cased
 * as a plain pop. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: no copy needed, just clear flags and pop. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Regular case: copy ST0 into ST(iDstReg), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8647
8648
8649/**
8650 * Common worker for FPU instructions working on ST0 and replaces it with the
8651 * result, i.e. unary operators.
8652 *
8653 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8654 */
8655FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
8656{
8657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8658
8659 IEM_MC_BEGIN(2, 1);
8660 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8661 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8662 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8663
8664 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8665 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8666 IEM_MC_PREPARE_FPU_USAGE();
8667 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8668 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
8669 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8670 } IEM_MC_ELSE() {
8671 IEM_MC_FPU_STACK_UNDERFLOW(0);
8672 } IEM_MC_ENDIF();
8673 IEM_MC_ADVANCE_RIP_AND_FINISH();
8674
8675 IEM_MC_END();
8676}
8677
8678
/** Opcode 0xd9 0xe0. FCHS - change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
8685
8686
/** Opcode 0xd9 0xe1. FABS - absolute value of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
8693
8694
/** Opcode 0xd9 0xe4. FTST - compares ST0 against 0.0, only FSW is updated. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8719
8720
/** Opcode 0xd9 0xe5. FXAM - classifies ST0 into the FSW condition codes.
 *  Note: unlike the other workers there is no emptiness check here; the
 *  register is referenced unconditionally since fxam also classifies empty
 *  registers (no underflow path). */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
8742
8743
8744/**
8745 * Common worker for FPU instructions pushing a constant onto the FPU stack.
8746 *
8747 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8748 */
8749FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
8750{
8751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8752
8753 IEM_MC_BEGIN(1, 1);
8754 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8755 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8756
8757 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8758 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8759 IEM_MC_PREPARE_FPU_USAGE();
8760 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
8761 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
8762 IEM_MC_PUSH_FPU_RESULT(FpuRes);
8763 } IEM_MC_ELSE() {
8764 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
8765 } IEM_MC_ENDIF();
8766 IEM_MC_ADVANCE_RIP_AND_FINISH();
8767
8768 IEM_MC_END();
8769}
8770
8771
/** Opcode 0xd9 0xe8. FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
8778
8779
/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
8786
8787
/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
8794
/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
8801
8802
/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
8809
/** Opcode 0xd9 0xed. FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
8816
8817
/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
8824
8825
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs).  In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
8839
8840
8841/**
8842 * Common worker for FPU instructions working on STn and ST0, storing the result
8843 * in STn, and popping the stack unless IE, DE or ZE was raised.
8844 *
8845 * @param bRm Mod R/M byte.
8846 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8847 */
8848FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8849{
8850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8851
8852 IEM_MC_BEGIN(3, 1);
8853 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8854 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8855 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8856 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8857
8858 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8859 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8860
8861 IEM_MC_PREPARE_FPU_USAGE();
8862 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
8863 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8864 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm));
8865 } IEM_MC_ELSE() {
8866 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm));
8867 } IEM_MC_ENDIF();
8868 IEM_MC_ADVANCE_RIP_AND_FINISH();
8869
8870 IEM_MC_END();
8871}
8872
8873
/** Opcode 0xd9 0xf1. FYL2X - result stored in ST1, then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
8880
8881
8882/**
8883 * Common worker for FPU instructions working on ST0 and having two outputs, one
8884 * replacing ST0 and one pushed onto the stack.
8885 *
8886 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8887 */
8888FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
8889{
8890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8891
8892 IEM_MC_BEGIN(2, 1);
8893 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
8894 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
8895 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
8896
8897 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8898 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8899 IEM_MC_PREPARE_FPU_USAGE();
8900 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
8901 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
8902 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
8903 } IEM_MC_ELSE() {
8904 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
8905 } IEM_MC_ENDIF();
8906 IEM_MC_ADVANCE_RIP_AND_FINISH();
8907
8908 IEM_MC_END();
8909}
8910
8911
/** Opcode 0xd9 0xf2. FPTAN - two outputs: ST0 replaced, one value pushed. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
8918
8919
/** Opcode 0xd9 0xf3. FPATAN - result stored in ST1, then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
8926
8927
/** Opcode 0xd9 0xf4. FXTRACT - two outputs: ST0 replaced, one value pushed. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
8934
8935
/** Opcode 0xd9 0xf5. FPREM1 - ST0 = remainder of ST0 / ST1 (IEEE). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8942
8943
/** Opcode 0xd9 0xf6. FDECSTP - rotates TOP down by one; no register content
 *  is modified. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8965
8966
/** Opcode 0xd9 0xf7. FINCSTP - rotates TOP up by one; no register content
 *  is modified. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
8988
8989
/** Opcode 0xd9 0xf8. FPREM - ST0 = partial remainder of ST0 / ST1. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
8996
8997
/** Opcode 0xd9 0xf9. FYL2XP1 - result stored in ST1, then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
9004
9005
/** Opcode 0xd9 0xfa. FSQRT - ST0 = sqrt(ST0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
9012
9013
/** Opcode 0xd9 0xfb. FSINCOS - two outputs: ST0 replaced, one value pushed. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
9020
9021
/** Opcode 0xd9 0xfc. FRNDINT - rounds ST0 to integer. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
9028
9029
/** Opcode 0xd9 0xfd. FSCALE - scales ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
9036
9037
/** Opcode 0xd9 0xfe. FSIN - ST0 = sin(ST0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
9044
9045
/** Opcode 0xd9 0xff. FCOS - ST0 = cos(ST0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
9052
9053
/** Used by iemOp_EscF1 to dispatch the 0xd9 register forms with
 *  ModRM.reg >= 4, i.e. ModRM bytes 0xe0 thru 0xff (indexed by bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
9090
9091
9092/**
9093 * @opcode 0xd9
9094 */
9095FNIEMOP_DEF(iemOp_EscF1)
9096{
9097 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9098 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
9099
9100 if (IEM_IS_MODRM_REG_MODE(bRm))
9101 {
9102 switch (IEM_GET_MODRM_REG_8(bRm))
9103 {
9104 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
9105 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
9106 case 2:
9107 if (bRm == 0xd0)
9108 return FNIEMOP_CALL(iemOp_fnop);
9109 return IEMOP_RAISE_INVALID_OPCODE();
9110 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
9111 case 4:
9112 case 5:
9113 case 6:
9114 case 7:
9115 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
9116 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
9117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9118 }
9119 }
9120 else
9121 {
9122 switch (IEM_GET_MODRM_REG_8(bRm))
9123 {
9124 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
9125 case 1: return IEMOP_RAISE_INVALID_OPCODE();
9126 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
9127 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
9128 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
9129 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
9130 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
9131 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
9132 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9133 }
9134 }
9135}
9136
9137
/** Opcode 0xda 11/0.
 * FCMOVB ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.CF is set.
 * Stack underflow is raised if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be valid; the reference we take is to ST(i). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9163
9164
/** Opcode 0xda 11/1.
 * FCMOVE ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9190
9191
/** Opcode 0xda 11/2.
 * FCMOVBE ST(0),ST(i): copy ST(i) into ST(0) when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9217
9218
/** Opcode 0xda 11/3.
 * FCMOVU ST(0),ST(i): copy ST(i) into ST(0) when PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9244
9245
9246/**
9247 * Common worker for FPU instructions working on ST0 and ST1, only affecting
9248 * flags, and popping twice when done.
9249 *
9250 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9251 */
9252FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9253{
9254 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9255
9256 IEM_MC_BEGIN(3, 1);
9257 IEM_MC_LOCAL(uint16_t, u16Fsw);
9258 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9259 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9260 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9261
9262 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9263 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9264
9265 IEM_MC_PREPARE_FPU_USAGE();
9266 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
9267 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9268 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
9269 } IEM_MC_ELSE() {
9270 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
9271 } IEM_MC_ENDIF();
9272 IEM_MC_ADVANCE_RIP_AND_FINISH();
9273
9274 IEM_MC_END();
9275}
9276
9277
/** Opcode 0xda 0xe9.
 * FUCOMPP: unordered compare of ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
9284
9285
9286/**
9287 * Common worker for FPU instructions working on ST0 and an m32i, and storing
9288 * the result in ST0.
9289 *
9290 * @param bRm Mod R/M byte.
9291 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9292 */
9293FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
9294{
9295 IEM_MC_BEGIN(3, 3);
9296 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9297 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9298 IEM_MC_LOCAL(int32_t, i32Val2);
9299 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9300 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9301 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
9302
9303 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9304 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9305
9306 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9307 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9308 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9309
9310 IEM_MC_PREPARE_FPU_USAGE();
9311 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9312 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
9313 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9314 } IEM_MC_ELSE() {
9315 IEM_MC_FPU_STACK_UNDERFLOW(0);
9316 } IEM_MC_ENDIF();
9317 IEM_MC_ADVANCE_RIP_AND_FINISH();
9318
9319 IEM_MC_END();
9320}
9321
9322
/** Opcode 0xda !11/0.
 * FIADD m32i: ST(0) = ST(0) + (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
9329
9330
/** Opcode 0xda !11/1.
 * FIMUL m32i: ST(0) = ST(0) * (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
9337
9338
/** Opcode 0xda !11/2.
 * FICOM m32i: compare ST(0) with an int32 memory operand, setting only FSW
 * condition codes (no stack register is written, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9370
9371
/** Opcode 0xda !11/3.
 * FICOMP m32i: same as FICOM m32i (shares iemAImpl_ficom_r80_by_i32) but
 * pops the stack afterwards - hence the _THEN_POP FSW/underflow variants. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9403
9404
/** Opcode 0xda !11/4.
 * FISUB m32i: ST(0) = ST(0) - (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
9411
9412
/** Opcode 0xda !11/5.
 * FISUBR m32i: ST(0) = (int32 memory operand) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
9419
9420
/** Opcode 0xda !11/6.
 * FIDIV m32i: ST(0) = ST(0) / (int32 memory operand). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
9427
9428
/** Opcode 0xda !11/7.
 * FIDIVR m32i: ST(0) = (int32 memory operand) / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
9435
9436
9437/**
9438 * @opcode 0xda
9439 */
9440FNIEMOP_DEF(iemOp_EscF2)
9441{
9442 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9443 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
9444 if (IEM_IS_MODRM_REG_MODE(bRm))
9445 {
9446 switch (IEM_GET_MODRM_REG_8(bRm))
9447 {
9448 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
9449 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
9450 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
9451 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
9452 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9453 case 5:
9454 if (bRm == 0xe9)
9455 return FNIEMOP_CALL(iemOp_fucompp);
9456 return IEMOP_RAISE_INVALID_OPCODE();
9457 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9458 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9459 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9460 }
9461 }
9462 else
9463 {
9464 switch (IEM_GET_MODRM_REG_8(bRm))
9465 {
9466 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
9467 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
9468 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
9469 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
9470 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
9471 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
9472 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
9473 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
9474 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9475 }
9476 }
9477}
9478
9479
/** Opcode 0xdb !11/0.
 * FILD m32i: convert an int32 memory operand to R80 and push it.  The push
 * requires ST(7) (the register below the current top) to be empty, otherwise
 * a stack push overflow is signalled. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9510
9511
/** Opcode 0xdb !11/1.
 * FISTTP m32i (SSE3): store ST(0) to memory as int32 using truncation, then
 * pop.  The destination is mapped for write up front; on stack underflow the
 * integer-indefinite value is stored if FCW.IM masks the fault. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9545
9546
/** Opcode 0xdb !11/2.
 * FIST m32i: store ST(0) to memory as int32 using FCW.RC rounding; no pop.
 * On stack underflow the integer-indefinite value is stored if FCW.IM masks
 * the fault. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9580
9581
/** Opcode 0xdb !11/3.
 * FISTP m32i: same as FIST m32i (shares iemAImpl_fist_r80_to_i32) but pops
 * the stack afterwards - hence the _THEN_POP FSW/underflow variants. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9615
9616
/** Opcode 0xdb !11/5.
 * FLD m80r: load an 80-bit real from memory and push it; ST(7) must be empty
 * or a stack push overflow is signalled. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9647
9648
/** Opcode 0xdb !11/7.
 * FSTP m80r: store ST(0) to memory as 80-bit real, then pop.  On stack
 * underflow the negative QNaN (real indefinite) is stored if FCW.IM masks
 * the fault. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte operand: the extended mapping variant is used to specify the
       unusual alignment requirement (cbAlign = 7). */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9682
9683
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9709
9710
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9736
9737
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i): copy ST(i) into ST(0) when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9763
9764
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i): copy ST(i) into ST(0) when EFLAGS.PF is clear
 * (not-unordered).
 * NOTE(review): the identifier/mnemonic string spell this "fcmovnnu" while
 * the Intel SDM spells it FCMOVNU - looks like a typo in the stats name;
 * left as-is since the symbol may be referenced by tooling. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9790
9791
/** Opcode 0xdb 0xe0.
 * FNENI: 8087 interrupt-enable; a no-op (beyond the #NM check) on later CPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9802
9803
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087 interrupt-disable; a no-op (beyond the #NM check) on later CPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9814
9815
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FSW exception flags (no pending-exception check first,
 * unlike the FCLEX pseudo-op with a WAIT prefix). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9829
9830
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU; deferred to the C implementation with
 * fCheckXcpts=false (the 'N' = no-wait form does not check for pending
 * exceptions first). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
9838
9839
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287-only; treated as a no-op beyond the #NM check. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9850
9851
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL-only; raises #UD here since newer CPUs do so (the no-op
 * emulation is kept under #if 0 for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
9867
9868
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i): unordered compare setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
9875
9876
/** Opcode 0xdb 11/6.
 * FCOMI ST(0),ST(i): ordered compare setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
9883
9884
9885/**
9886 * @opcode 0xdb
9887 */
9888FNIEMOP_DEF(iemOp_EscF3)
9889{
9890 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9891 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
9892 if (IEM_IS_MODRM_REG_MODE(bRm))
9893 {
9894 switch (IEM_GET_MODRM_REG_8(bRm))
9895 {
9896 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
9897 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
9898 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
9899 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
9900 case 4:
9901 switch (bRm)
9902 {
9903 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
9904 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
9905 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
9906 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
9907 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
9908 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
9909 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
9910 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
9911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9912 }
9913 break;
9914 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
9915 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
9916 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9917 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9918 }
9919 }
9920 else
9921 {
9922 switch (IEM_GET_MODRM_REG_8(bRm))
9923 {
9924 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9925 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9926 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9927 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9928 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9929 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9930 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9931 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9932 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9933 }
9934 }
9935}
9936
9937
9938/**
9939 * Common worker for FPU instructions working on STn and ST0, and storing the
9940 * result in STn unless IE, DE or ZE was raised.
9941 *
9942 * @param bRm Mod R/M byte.
9943 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9944 */
9945FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9946{
9947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9948
9949 IEM_MC_BEGIN(3, 1);
9950 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9951 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9952 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9953 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9954
9955 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9956 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9957
9958 IEM_MC_PREPARE_FPU_USAGE();
9959 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
9960 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9961 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
9962 } IEM_MC_ELSE() {
9963 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
9964 } IEM_MC_ENDIF();
9965 IEM_MC_ADVANCE_RIP_AND_FINISH();
9966
9967 IEM_MC_END();
9968}
9969
9970
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0): ST(i) = ST(i) + ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9977
9978
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0): ST(i) = ST(i) * ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9985
9986
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0).  Note the 0xdc /4 vs /5 operand-order quirk: the
 * dispatcher maps /4 to the 'r' worker here. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9993
9994
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
10001
10002
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
10009
10010
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
10017
10018
10019/**
10020 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
10021 * memory operand, and storing the result in ST0.
10022 *
10023 * @param bRm Mod R/M byte.
10024 * @param pfnImpl Pointer to the instruction implementation (assembly).
10025 */
10026FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
10027{
10028 IEM_MC_BEGIN(3, 3);
10029 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10030 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10031 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
10032 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10033 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
10034 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
10035
10036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10038 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10039 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10040
10041 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10042 IEM_MC_PREPARE_FPU_USAGE();
10043 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
10044 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
10045 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10046 } IEM_MC_ELSE() {
10047 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10048 } IEM_MC_ENDIF();
10049 IEM_MC_ADVANCE_RIP_AND_FINISH();
10050
10051 IEM_MC_END();
10052}
10053
10054
/** Opcode 0xdc !11/0.
 * FADD m64real: ST0 += [mem64], via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
10061
10062
/** Opcode 0xdc !11/1.
 * FMUL m64real: ST0 *= [mem64], via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
10069
10070
/** Opcode 0xdc !11/2.
 * FCOM m64real: compare ST0 with [mem64]; only FSW is updated, ST0 is left
 * unchanged (hence no IEMFPURESULT, just the u16Fsw output). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register involved in the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10102
10103
/** Opcode 0xdc !11/3.
 * FCOMP m64real: same as FCOM m64real (shares iemAImpl_fcom_r80_by_r64) but
 * pops the FPU stack afterwards (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10135
10136
/** Opcode 0xdc !11/4.
 * FSUB m64real: ST0 -= [mem64], via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
10143
10144
/** Opcode 0xdc !11/5.
 * FSUBR m64real: ST0 = [mem64] - ST0, via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
10151
10152
/** Opcode 0xdc !11/6.
 * FDIV m64real: ST0 /= [mem64], via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
10159
10160
/** Opcode 0xdc !11/7.
 * FDIVR m64real: ST0 = [mem64] / ST0, via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
10167
10168
10169/**
10170 * @opcode 0xdc
10171 */
10172FNIEMOP_DEF(iemOp_EscF4)
10173{
10174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10175 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
10176 if (IEM_IS_MODRM_REG_MODE(bRm))
10177 {
10178 switch (IEM_GET_MODRM_REG_8(bRm))
10179 {
10180 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
10181 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
10182 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
10183 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
10184 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
10185 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
10186 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
10187 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
10188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10189 }
10190 }
10191 else
10192 {
10193 switch (IEM_GET_MODRM_REG_8(bRm))
10194 {
10195 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
10196 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
10197 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
10198 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
10199 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
10200 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
10201 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
10202 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
10203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10204 }
10205 }
10206}
10207
10208
/** Opcode 0xdd !11/0.
 * FLD m64real: convert [mem64] to R80 and push it onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST7 (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10239
10240
/** Opcode 0xdd !11/1. (comment fixed; was mislabelled !11/0 - see dispatcher)
 * FISTTP m64int (SSE3): store ST0 to [mem64] as int64 with truncation, then
 * pop.  On an empty stack, writes the integer-indefinite value if FCW.IM is
 * masked and reports underflow. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit is conditional on FSW (store suppressed on unmasked #IA). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10274
10275
/** Opcode 0xdd !11/2. (comment fixed; was mislabelled !11/0 - see dispatcher)
 * FST m64real: store ST0 to [mem64] as double precision; stack not popped.
 * On an empty stack, writes negative QNaN if FCW.IM is masked and reports
 * underflow. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10309
10310
10311
10312
/** Opcode 0xdd !11/3. (comment fixed; was mislabelled !11/0 - see dispatcher)
 * FSTP m64real: identical to FST m64real but pops the FPU stack afterwards
 * (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10346
10347
/** Opcode 0xdd !11/4. (comment fixed; was mislabelled !11/0 - see dispatcher)
 * FRSTOR m94/108byte: restore the complete FPU state from memory; the heavy
 * lifting is deferred to the C implementation (iemCImpl_frstor). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10364
10365
/** Opcode 0xdd !11/6. (comment fixed; was mislabelled !11/0 - see dispatcher)
 * FNSAVE m94/108byte: save the complete FPU state to memory and reinitialize
 * the FPU; deferred to the C implementation (iemCImpl_fnsave). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10382
/** Opcode 0xdd !11/7. (comment fixed; was mislabelled !11/0 - see dispatcher)
 * FNSTSW m16: store the FPU status word to a 16-bit memory location without
 * checking for pending FPU exceptions. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
10406
10407
/** Opcode 0xdd 11/0.
 * FFREE ST(i): mark register ST(i) as empty in the tag word; TOP is not
 * changed and the register contents are left alone. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10428
10429
/** Opcode 0xdd 11/2. (comment fixed; was mislabelled 11/1 - /1 is the
 * reserved FXCH alias, see the dispatcher)
 * FST ST(i): copy ST0 into register ST(i); stack not popped.  On an empty
 * ST0, signals underflow on the destination register. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the ST0 value in a result with a zero FSW delta and store it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm));
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10453
10454
/** Opcode 0xdd 11/4. (comment fixed; was mislabelled 11/3 - see dispatcher)
 * FUCOM ST(i): unordered compare of ST0 with ST(i), FSW only, no store. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
10461
10462
/** Opcode 0xdd 11/5. (comment fixed; was mislabelled 11/4 - see dispatcher)
 * FUCOMP ST(i): unordered compare of ST0 with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
10469
10470
10471/**
10472 * @opcode 0xdd
10473 */
10474FNIEMOP_DEF(iemOp_EscF5)
10475{
10476 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10477 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
10478 if (IEM_IS_MODRM_REG_MODE(bRm))
10479 {
10480 switch (IEM_GET_MODRM_REG_8(bRm))
10481 {
10482 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
10483 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
10484 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
10485 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
10486 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
10487 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
10488 case 6: return IEMOP_RAISE_INVALID_OPCODE();
10489 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10491 }
10492 }
10493 else
10494 {
10495 switch (IEM_GET_MODRM_REG_8(bRm))
10496 {
10497 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
10498 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
10499 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
10500 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
10501 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
10502 case 5: return IEMOP_RAISE_INVALID_OPCODE();
10503 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
10504 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
10505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10506 }
10507 }
10508}
10509
10510
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): add and pop, via the common ST(i)/ST(0)-then-pop
 * worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
10517
10518
/** Opcode 0xde 11/1. (comment fixed; was mislabelled 11/0 - see dispatcher)
 * FMULP ST(i),ST(0): multiply and pop, via the common ST(i)/ST(0)-then-pop
 * worker. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
10525
10526
/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST0 with ST1 and pop both, via the no-store
 * pop-twice worker. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
10533
10534
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): reverse subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
10541
10542
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
10549
10550
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): reverse divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
10557
10558
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
10565
10566
10567/**
10568 * Common worker for FPU instructions working on ST0 and an m16i, and storing
10569 * the result in ST0.
10570 *
10571 * @param bRm Mod R/M byte.
10572 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10573 */
10574FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
10575{
10576 IEM_MC_BEGIN(3, 3);
10577 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10578 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10579 IEM_MC_LOCAL(int16_t, i16Val2);
10580 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10581 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10582 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
10583
10584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10586
10587 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10588 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10589 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10590
10591 IEM_MC_PREPARE_FPU_USAGE();
10592 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10593 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
10594 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
10595 } IEM_MC_ELSE() {
10596 IEM_MC_FPU_STACK_UNDERFLOW(0);
10597 } IEM_MC_ENDIF();
10598 IEM_MC_ADVANCE_RIP_AND_FINISH();
10599
10600 IEM_MC_END();
10601}
10602
10603
/** Opcode 0xde !11/0.
 * FIADD m16int: ST0 += (int16)[mem], via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
10610
10611
/** Opcode 0xde !11/1.
 * FIMUL m16int: ST0 *= (int16)[mem], via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
10618
10619
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST0 with (int16)[mem]; FSW only, no store, no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10651
10652
/** Opcode 0xde !11/3.
 * FICOMP m16int: same as FICOM m16int but pops the FPU stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10684
10685
/** Opcode 0xde !11/4.
 * FISUB m16int: ST0 -= (int16)[mem], via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
10692
10693
/** Opcode 0xde !11/5.
 * FISUBR m16int: ST0 = (int16)[mem] - ST0, via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
10700
10701
/** Opcode 0xde !11/6.
 * FIDIV m16int: ST0 /= (int16)[mem], via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
10708
10709
/** Opcode 0xde !11/7.
 * FIDIVR m16int: ST0 = (int16)[mem] / ST0, via the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
10716
10717
10718/**
10719 * @opcode 0xde
10720 */
10721FNIEMOP_DEF(iemOp_EscF6)
10722{
10723 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10724 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
10725 if (IEM_IS_MODRM_REG_MODE(bRm))
10726 {
10727 switch (IEM_GET_MODRM_REG_8(bRm))
10728 {
10729 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
10730 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
10731 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10732 case 3: if (bRm == 0xd9)
10733 return FNIEMOP_CALL(iemOp_fcompp);
10734 return IEMOP_RAISE_INVALID_OPCODE();
10735 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
10736 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
10737 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
10738 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
10739 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10740 }
10741 }
10742 else
10743 {
10744 switch (IEM_GET_MODRM_REG_8(bRm))
10745 {
10746 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
10747 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
10748 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
10749 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
10750 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
10751 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
10752 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
10753 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
10754 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10755 }
10756 }
10757}
10758
10759
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp:
 * mark ST(i) empty, then increment TOP (the IEM_MC_FPU_STACK_INC_TOP
 * extra step is the only difference from iemOp_ffree_stN). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10780
10781
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX without checking for pending
 * FPU exceptions. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10797
10798
10799/** Opcode 0xdf 11/5. */
10800FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
10801{
10802 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
10803 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
10804}
10805
10806
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i): ordered compare into EFLAGS, then pop; defers to the
 * common fcomi/fucomi C implementation with fPop set. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
10813
10814
/** Opcode 0xdf !11/0.
 * FILD m16int: convert (int16)[mem] to R80 and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs ST7 (the register below the current top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10845
10846
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST0 to [mem16] as int16 with truncation, then
 * pop.  On an empty stack, writes the integer-indefinite value if FCW.IM is
 * masked and reports underflow. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is conditional on FSW (store suppressed on unmasked #IA). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10880
10881
/** Opcode 0xdf !11/2.
 * FIST m16int: store ST0 to [mem16] as int16 (rounding per FCW.RC); stack
 * not popped.  Empty-stack handling as for FISTTP m16int. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10915
10916
/** Opcode 0xdf !11/3.
 * FISTP m16int: identical to FIST m16int but pops the FPU stack afterwards
 * (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10950
10951
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - load an 80-bit packed BCD value from memory, convert it to
 * the 80-bit floating point format and push it onto the x87 stack.  If the
 * register that would become ST(0) (i.e. ST(7)) is not free, a stack push
 * overflow is signalled instead. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the source operand before touching the FPU stack. */
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10982
10983
/** Opcode 0xdf !11/5.
 * FILD m64i - load a signed 64-bit integer from memory, convert it to the
 * 80-bit floating point format and push it onto the x87 stack.  If the
 * register that would become ST(0) (i.e. ST(7)) is not free, a stack push
 * overflow is signalled instead. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the source operand before touching the FPU stack. */
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11014
11015
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) to memory as an 80-bit packed BCD value and pop
 * the x87 register stack.  On stack underflow with FCW.IM set, the BCD
 * indefinite value is stored instead; the underflow is recorded and the
 * stack is still popped. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the 10-byte destination for writing; note the explicit alignment
       argument (7) passed to the _EX mapping variant. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Normal path: convert, commit the store, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: store the indefinite value only if #IA is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11049
11050
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) to memory as a signed 64-bit integer and pop the
 * x87 register stack.  When ST(0) is empty and FCW.IM is set, the 64-bit
 * integer indefinite value is stored instead; the underflow is recorded and
 * the stack is still popped. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination qword for writing before doing any FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Normal path: convert, commit the store, update FSW and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ELSE() {
        /* Stack underflow: store the indefinite value only if #IA is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11084
11085
/**
 * @opcode 0xdf
 *
 * Escape byte 0xdf decoder: fetches the ModR/M byte and dispatches on the
 * reg field.  Register-form (mod == 3) encodings map to the ST(i) variants
 * (several of them reserved but emulated per vendor behavior, see notes
 * below); memory-form encodings map to the integer/BCD load/store group.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only DF E0 encodes FNSTSW AX; the rest of /4 is invalid */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11125
11126
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb - decrement the count register ((r/e)CX per the effective
 * address size) and take the relative jump when the new count is non-zero
 * AND EFLAGS.ZF is clear.  The count decrement itself does not modify any
 * flags.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX vs ECX vs RCX. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11175
11176
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb - decrement the count register ((r/e)CX per the effective
 * address size) and take the relative jump when the new count is non-zero
 * AND EFLAGS.ZF is set.  The count decrement itself does not modify any
 * flags.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The address-size prefix selects CX vs ECX vs RCX. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11225
11226
/**
 * @opcode 0xe2
 *
 * LOOP Jb - decrement the count register ((r/e)CX per the effective address
 * size) and take the relative jump when the new count is non-zero.  No flags
 * are modified.  Includes a logging-only shortcut for the self-referencing
 * 'loop $-2' stall idiom (see the NB comment below).
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Shortcut only when verbose logging is on and the target is the
       instruction itself: zero the count register and fall through. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0,0);
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    /* The address-size prefix selects CX vs ECX vs RCX. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);

            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11313
11314
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb - take the relative jump when the count register
 * ((r/e)CX per the effective address size) is zero.  The count register is
 * only tested, never modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* Note the inverted branch sense: non-zero count falls through. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11360
11361
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the immediate-specified I/O port into AL;
 * deferred to the C implementation (iemCImpl_in) with fImm=true and
 * operand size 1. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
}
11370
11371
/** Opcode 0xe5.
 * IN eAX,Ib - read a word/dword from the immediate-specified I/O port into
 * AX/EAX; the access width (2 or 4) follows the effective operand size. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
11380
11381
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate-specified I/O port; deferred to the
 * C implementation (iemCImpl_out) with fImm=true and operand size 1. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
}
11390
11391
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX to the immediate-specified I/O port; the access
 * width (2 or 4) follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
11400
11401
/**
 * @opcode 0xe8
 *
 * CALL Jv - near relative call.  The immediate is 16 or 32 bits wide per the
 * effective operand size; in 64-bit mode a 32-bit immediate is fetched and
 * sign-extended to 64 bits.  All variants defer to the operand-size specific
 * C implementations.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 32-bit displacement, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11432
11433
/**
 * @opcode 0xe9
 *
 * JMP Jv - near relative jump with a 16-bit or 32-bit displacement.  Note
 * that the 64-bit case shares the 32-bit path: a 32-bit displacement is
 * fetched and applied sign-extended by the S32 jump macro.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;
        }

        case IEMMODE_64BIT: /* same 32-bit displacement as 32-bit mode */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11465
11466
/**
 * @opcode 0xea
 *
 * JMP Ap - direct far jump with an inline selector:offset pointer.  Invalid
 * in 64-bit mode (IEMOP_HLP_NO_64BIT).  The offset is 16 or 32 bits per the
 * effective operand size; the 16-bit offset is zero-extended to 32 bits
 * before being handed to the common far-jump C implementation.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
11485
11486
/**
 * @opcode 0xeb
 *
 * JMP Jb - short relative jump with a sign-extended 8-bit displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
11501
11502
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL; deferred to the
 * C implementation with operand size 1. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
11510
11511
/** Opcode 0xed.
 * IN eAX,DX - read a word/dword from the I/O port in DX into AX/EAX; the
 * access width (2 or 4) follows the effective operand size. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
11519
11520
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port in DX; deferred to the C
 * implementation with operand size 1. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
11528
11529
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX to the I/O port in DX; the access width (2 or 4)
 * follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
11537
11538
/**
 * @opcode 0xf0
 *
 * LOCK prefix - records IEM_OP_PRF_LOCK (unless the VM is configured to
 * disregard LOCK, see fDisregardLock) and then continues decoding with the
 * next opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    if (!pVCpu->iem.s.fDisregardLock)
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    /* Decode and dispatch the prefixed instruction. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11551
11552
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP - raises a \#DB via the common software interrupt C
 * implementation with the IEMINT_INT1 flavor.  Requires a 386 or later.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
11566
11567
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix - clears any previously seen REPZ prefix, records
 * IEM_OP_PRF_REPNZ and selects prefix table index 3, then continues decoding
 * with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    /* Decode and dispatch the prefixed instruction. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11585
11586
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix - clears any previously seen REPNZ prefix, records
 * IEM_OP_PRF_REPZ and selects prefix table index 2, then continues decoding
 * with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    /* Decode and dispatch the prefixed instruction. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
11604
11605
/**
 * @opcode 0xf4
 *
 * HLT - deferred entirely to the C implementation (privilege checking and
 * the actual halting happen there).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
11615
11616
/**
 * @opcode 0xf5
 *
 * CMC - complement the carry flag; no other EFLAGS bits are touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11629
11630
/**
 * Body of 'inc/dec/not/neg Eb'.
 *
 * Handles the register form directly and the memory form with separate
 * unlocked and LOCK-prefixed paths (same structure, different worker).
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnNormalU8    Assembly worker for the plain (unlocked) case.
 * @param   a_fnLockedU8    Assembly worker used when a LOCK prefix is present.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_BEGIN(2, 0); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            /* unlocked variant */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK-prefixed variant - only the worker differs. */ \
            IEM_MC_BEGIN(2, 2); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
11689
11690
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are delegated to iemOpCommonUnaryGReg; memory operands
 * are handled here for all three operand sizes, picking the locked or
 * unlocked worker from @a pImpl based on the LOCK prefix.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   The instruction implementation (per-size normal/locked
 *                  worker function table).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            /* Select the locked or unlocked worker based on the prefix. */
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11769
11770
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 *
 * TEST Eb,Ib - AND the 8-bit operand with an 8-bit immediate and set flags
 * only; the destination is never written (memory is mapped read-only).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing 1 accounts for the immediate byte following the
           ModR/M encoding - presumably the imm size hint; TODO confirm. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping - TEST only reads the destination. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
11820
11821
/** Opcode 0xf7 /0.
 * TEST Ev,Iv - AND the 16/32/64-bit operand with an immediate and set flags
 * only; the destination is never written (memory is mapped read-only).  In
 * 64-bit mode the immediate is a sign-extended 32-bit value. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                /* Sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The trailing 2/4 matches the immediate size following the
                   ModR/M encoding - presumably the imm size hint; TODO confirm. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Read-only mapping - TEST only reads the destination. */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                /* Sign-extended 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11961
11962
/** Opcode 0xf6 /4, /5, /6 and /7. */
/**
 * Common worker for the byte-sized mul/imul/div/idiv forms of opcode 0xf6.
 *
 * The 8-bit source operand comes from the r/m encoding (register or memory);
 * AX is the implicit double-width accumulator, read and written through
 * pu16AX.
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit multiply/divide worker.  Returns zero on
 *                  success, non-zero to raise \#DE (divide error).
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero status from the worker means divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        /* Effective address must be decoded before declaring decoding done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero status from the worker means divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
12013
12014
12015/** Opcode 0xf7 /4, /5, /6 and /7. */
12016FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
12017{
12018 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12019
12020 if (IEM_IS_MODRM_REG_MODE(bRm))
12021 {
12022 /* register access */
12023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12024 switch (pVCpu->iem.s.enmEffOpSize)
12025 {
12026 case IEMMODE_16BIT:
12027 {
12028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12029 IEM_MC_BEGIN(4, 1);
12030 IEM_MC_ARG(uint16_t *, pu16AX, 0);
12031 IEM_MC_ARG(uint16_t *, pu16DX, 1);
12032 IEM_MC_ARG(uint16_t, u16Value, 2);
12033 IEM_MC_ARG(uint32_t *, pEFlags, 3);
12034 IEM_MC_LOCAL(int32_t, rc);
12035
12036 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
12037 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
12038 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
12039 IEM_MC_REF_EFLAGS(pEFlags);
12040 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
12041 IEM_MC_IF_LOCAL_IS_Z(rc) {
12042 IEM_MC_ADVANCE_RIP_AND_FINISH();
12043 } IEM_MC_ELSE() {
12044 IEM_MC_RAISE_DIVIDE_ERROR();
12045 } IEM_MC_ENDIF();
12046
12047 IEM_MC_END();
12048 break;
12049 }
12050
12051 case IEMMODE_32BIT:
12052 {
12053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12054 IEM_MC_BEGIN(4, 1);
12055 IEM_MC_ARG(uint32_t *, pu32AX, 0);
12056 IEM_MC_ARG(uint32_t *, pu32DX, 1);
12057 IEM_MC_ARG(uint32_t, u32Value, 2);
12058 IEM_MC_ARG(uint32_t *, pEFlags, 3);
12059 IEM_MC_LOCAL(int32_t, rc);
12060
12061 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
12062 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
12063 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
12064 IEM_MC_REF_EFLAGS(pEFlags);
12065 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
12066 IEM_MC_IF_LOCAL_IS_Z(rc) {
12067 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
12068 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
12069 IEM_MC_ADVANCE_RIP_AND_FINISH();
12070 } IEM_MC_ELSE() {
12071 IEM_MC_RAISE_DIVIDE_ERROR();
12072 } IEM_MC_ENDIF();
12073
12074 IEM_MC_END();
12075 break;
12076 }
12077
12078 case IEMMODE_64BIT:
12079 {
12080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12081 IEM_MC_BEGIN(4, 1);
12082 IEM_MC_ARG(uint64_t *, pu64AX, 0);
12083 IEM_MC_ARG(uint64_t *, pu64DX, 1);
12084 IEM_MC_ARG(uint64_t, u64Value, 2);
12085 IEM_MC_ARG(uint32_t *, pEFlags, 3);
12086 IEM_MC_LOCAL(int32_t, rc);
12087
12088 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
12089 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
12090 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
12091 IEM_MC_REF_EFLAGS(pEFlags);
12092 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
12093 IEM_MC_IF_LOCAL_IS_Z(rc) {
12094 IEM_MC_ADVANCE_RIP_AND_FINISH();
12095 } IEM_MC_ELSE() {
12096 IEM_MC_RAISE_DIVIDE_ERROR();
12097 } IEM_MC_ENDIF();
12098
12099 IEM_MC_END();
12100 break;
12101 }
12102
12103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12104 }
12105 }
12106 else
12107 {
12108 /* memory access. */
12109 switch (pVCpu->iem.s.enmEffOpSize)
12110 {
12111 case IEMMODE_16BIT:
12112 {
12113 IEM_MC_BEGIN(4, 2);
12114 IEM_MC_ARG(uint16_t *, pu16AX, 0);
12115 IEM_MC_ARG(uint16_t *, pu16DX, 1);
12116 IEM_MC_ARG(uint16_t, u16Value, 2);
12117 IEM_MC_ARG(uint32_t *, pEFlags, 3);
12118 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12119 IEM_MC_LOCAL(int32_t, rc);
12120
12121 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12123 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12124 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
12125 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
12126 IEM_MC_REF_EFLAGS(pEFlags);
12127 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
12128 IEM_MC_IF_LOCAL_IS_Z(rc) {
12129 IEM_MC_ADVANCE_RIP_AND_FINISH();
12130 } IEM_MC_ELSE() {
12131 IEM_MC_RAISE_DIVIDE_ERROR();
12132 } IEM_MC_ENDIF();
12133
12134 IEM_MC_END();
12135 break;
12136 }
12137
12138 case IEMMODE_32BIT:
12139 {
12140 IEM_MC_BEGIN(4, 2);
12141 IEM_MC_ARG(uint32_t *, pu32AX, 0);
12142 IEM_MC_ARG(uint32_t *, pu32DX, 1);
12143 IEM_MC_ARG(uint32_t, u32Value, 2);
12144 IEM_MC_ARG(uint32_t *, pEFlags, 3);
12145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12146 IEM_MC_LOCAL(int32_t, rc);
12147
12148 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12149 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12150 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12151 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
12152 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
12153 IEM_MC_REF_EFLAGS(pEFlags);
12154 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
12155 IEM_MC_IF_LOCAL_IS_Z(rc) {
12156 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
12157 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
12158 IEM_MC_ADVANCE_RIP_AND_FINISH();
12159 } IEM_MC_ELSE() {
12160 IEM_MC_RAISE_DIVIDE_ERROR();
12161 } IEM_MC_ENDIF();
12162
12163 IEM_MC_END();
12164 break;
12165 }
12166
12167 case IEMMODE_64BIT:
12168 {
12169 IEM_MC_BEGIN(4, 2);
12170 IEM_MC_ARG(uint64_t *, pu64AX, 0);
12171 IEM_MC_ARG(uint64_t *, pu64DX, 1);
12172 IEM_MC_ARG(uint64_t, u64Value, 2);
12173 IEM_MC_ARG(uint32_t *, pEFlags, 3);
12174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12175 IEM_MC_LOCAL(int32_t, rc);
12176
12177 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12179 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
12180 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
12181 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
12182 IEM_MC_REF_EFLAGS(pEFlags);
12183 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
12184 IEM_MC_IF_LOCAL_IS_Z(rc) {
12185 IEM_MC_ADVANCE_RIP_AND_FINISH();
12186 } IEM_MC_ELSE() {
12187 IEM_MC_RAISE_DIVIDE_ERROR();
12188 } IEM_MC_ENDIF();
12189
12190 IEM_MC_END();
12191 break;
12192 }
12193
12194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12195 }
12196 }
12197}
12198
12199
12200/**
12201 * @opmaps grp3_f6
12202 * @opcode /2
12203 */
12204FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
12205{
12206 IEMOP_MNEMONIC(not_Eb, "not Eb");
12207 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
12208}
12209
12210
12211/**
12212 * @opmaps grp3_f6
12213 * @opcode /3
12214 */
12215FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
12216{
12217 IEMOP_MNEMONIC(net_Eb, "neg Eb");
12218 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
12219}
12220
12221
12222/**
12223 * @opcode 0xf6
12224 */
12225FNIEMOP_DEF(iemOp_Grp3_Eb)
12226{
12227 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12228 switch (IEM_GET_MODRM_REG_8(bRm))
12229 {
12230 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12231 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
12232 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
12233 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
12234 case 4:
12235 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
12236 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12237 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
12238 case 5:
12239 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
12240 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12241 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
12242 case 6:
12243 IEMOP_MNEMONIC(div_Eb, "div Eb");
12244 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12245 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
12246 case 7:
12247 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
12248 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12249 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
12250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12251 }
12252}
12253
12254
12255/**
12256 * @opcode 0xf7
12257 */
12258FNIEMOP_DEF(iemOp_Grp3_Ev)
12259{
12260 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12261 switch (IEM_GET_MODRM_REG_8(bRm))
12262 {
12263 case 0:
12264 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
12265 case 1:
12266/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
12267 return IEMOP_RAISE_INVALID_OPCODE();
12268 case 2:
12269 IEMOP_MNEMONIC(not_Ev, "not Ev");
12270 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
12271 case 3:
12272 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
12273 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
12274 case 4:
12275 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
12276 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12277 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
12278 case 5:
12279 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
12280 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
12281 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
12282 case 6:
12283 IEMOP_MNEMONIC(div_Ev, "div Ev");
12284 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12285 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
12286 case 7:
12287 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
12288 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
12289 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
12290 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12291 }
12292}
12293
12294
12295/**
12296 * @opcode 0xf8
12297 */
12298FNIEMOP_DEF(iemOp_clc)
12299{
12300 IEMOP_MNEMONIC(clc, "clc");
12301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12302 IEM_MC_BEGIN(0, 0);
12303 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
12304 IEM_MC_ADVANCE_RIP_AND_FINISH();
12305 IEM_MC_END();
12306}
12307
12308
12309/**
12310 * @opcode 0xf9
12311 */
12312FNIEMOP_DEF(iemOp_stc)
12313{
12314 IEMOP_MNEMONIC(stc, "stc");
12315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12316 IEM_MC_BEGIN(0, 0);
12317 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
12318 IEM_MC_ADVANCE_RIP_AND_FINISH();
12319 IEM_MC_END();
12320}
12321
12322
12323/**
12324 * @opcode 0xfa
12325 */
12326FNIEMOP_DEF(iemOp_cli)
12327{
12328 IEMOP_MNEMONIC(cli, "cli");
12329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12330 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
12331}
12332
12333
/**
 * @opcode 0xfb
 */
/* STI - deferred to a C implementation (permission checks and the
   one-instruction interrupt shadow are handled there). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
12340
12341
12342/**
12343 * @opcode 0xfc
12344 */
12345FNIEMOP_DEF(iemOp_cld)
12346{
12347 IEMOP_MNEMONIC(cld, "cld");
12348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12349 IEM_MC_BEGIN(0, 0);
12350 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
12351 IEM_MC_ADVANCE_RIP_AND_FINISH();
12352 IEM_MC_END();
12353}
12354
12355
12356/**
12357 * @opcode 0xfd
12358 */
12359FNIEMOP_DEF(iemOp_std)
12360{
12361 IEMOP_MNEMONIC(std, "std");
12362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12363 IEM_MC_BEGIN(0, 0);
12364 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
12365 IEM_MC_ADVANCE_RIP_AND_FINISH();
12366 IEM_MC_END();
12367}
12368
12369
12370/**
12371 * @opmaps grp4
12372 * @opcode /0
12373 */
12374FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
12375{
12376 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
12377 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
12378}
12379
12380
12381/**
12382 * @opmaps grp4
12383 * @opcode /1
12384 */
12385FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
12386{
12387 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
12388 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
12389}
12390
12391
12392/**
12393 * @opcode 0xfe
12394 */
12395FNIEMOP_DEF(iemOp_Grp4)
12396{
12397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12398 switch (IEM_GET_MODRM_REG_8(bRm))
12399 {
12400 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
12401 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
12402 default:
12403 /** @todo is the eff-addr decoded? */
12404 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
12405 return IEMOP_RAISE_INVALID_OPCODE();
12406 }
12407}
12408
12409
12410/**
12411 * Opcode 0xff /2.
12412 * @param bRm The RM byte.
12413 */
12414FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
12415{
12416 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
12417 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12418
12419 if (IEM_IS_MODRM_REG_MODE(bRm))
12420 {
12421 /* The new RIP is taken from a register. */
12422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12423 switch (pVCpu->iem.s.enmEffOpSize)
12424 {
12425 case IEMMODE_16BIT:
12426 IEM_MC_BEGIN(1, 0);
12427 IEM_MC_ARG(uint16_t, u16Target, 0);
12428 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12429 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
12430 IEM_MC_END();
12431 break;
12432
12433 case IEMMODE_32BIT:
12434 IEM_MC_BEGIN(1, 0);
12435 IEM_MC_ARG(uint32_t, u32Target, 0);
12436 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12437 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
12438 IEM_MC_END();
12439 break;
12440
12441 case IEMMODE_64BIT:
12442 IEM_MC_BEGIN(1, 0);
12443 IEM_MC_ARG(uint64_t, u64Target, 0);
12444 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12445 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
12446 IEM_MC_END();
12447 break;
12448
12449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12450 }
12451 }
12452 else
12453 {
12454 /* The new RIP is taken from a register. */
12455 switch (pVCpu->iem.s.enmEffOpSize)
12456 {
12457 case IEMMODE_16BIT:
12458 IEM_MC_BEGIN(1, 1);
12459 IEM_MC_ARG(uint16_t, u16Target, 0);
12460 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12463 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12464 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
12465 IEM_MC_END();
12466 break;
12467
12468 case IEMMODE_32BIT:
12469 IEM_MC_BEGIN(1, 1);
12470 IEM_MC_ARG(uint32_t, u32Target, 0);
12471 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12472 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12474 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12475 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
12476 IEM_MC_END();
12477 break;
12478
12479 case IEMMODE_64BIT:
12480 IEM_MC_BEGIN(1, 1);
12481 IEM_MC_ARG(uint64_t, u64Target, 0);
12482 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12483 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12484 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12485 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12486 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
12487 IEM_MC_END();
12488 break;
12489
12490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12491 }
12492 }
12493}
12494
/** C implementation worker signature for a far branch (callf/jmpf). */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);

/**
 * Common worker for grp5 far call/jmp (0xff /3 and /5).
 *
 * Loads a far pointer (offset followed by selector) from memory and hands it
 * to the given far-branch C implementation.  Register operands are invalid
 * and raise \#UD.
 *
 * @param   bRm       The ModR/M byte.
 * @param   pfnCImpl  The far-branch worker (iemCImpl_callf or iemCImpl_FarJmp).
 */
FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* Registers? How?? */
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(bRm)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */
    /** @todo what does VIA do? */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu))
    { /* likely */ }
    else
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT;

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Offset first, then the selector at the next word. */
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* Only reachable on Intel-like CPUs, see the REX.W handling above. */
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu));
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint64_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12561
12562
12563/**
12564 * Opcode 0xff /3.
12565 * @param bRm The RM byte.
12566 */
12567FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
12568{
12569 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
12570 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
12571}
12572
12573
12574/**
12575 * Opcode 0xff /4.
12576 * @param bRm The RM byte.
12577 */
12578FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
12579{
12580 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
12581 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12582
12583 if (IEM_IS_MODRM_REG_MODE(bRm))
12584 {
12585 /* The new RIP is taken from a register. */
12586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12587 switch (pVCpu->iem.s.enmEffOpSize)
12588 {
12589 case IEMMODE_16BIT:
12590 IEM_MC_BEGIN(0, 1);
12591 IEM_MC_LOCAL(uint16_t, u16Target);
12592 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12593 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
12594 IEM_MC_END();
12595 break;
12596
12597 case IEMMODE_32BIT:
12598 IEM_MC_BEGIN(0, 1);
12599 IEM_MC_LOCAL(uint32_t, u32Target);
12600 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12601 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
12602 IEM_MC_END();
12603 break;
12604
12605 case IEMMODE_64BIT:
12606 IEM_MC_BEGIN(0, 1);
12607 IEM_MC_LOCAL(uint64_t, u64Target);
12608 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
12609 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
12610 IEM_MC_END();
12611 break;
12612
12613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12614 }
12615 }
12616 else
12617 {
12618 /* The new RIP is taken from a memory location. */
12619 switch (pVCpu->iem.s.enmEffOpSize)
12620 {
12621 case IEMMODE_16BIT:
12622 IEM_MC_BEGIN(0, 2);
12623 IEM_MC_LOCAL(uint16_t, u16Target);
12624 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12625 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12627 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12628 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
12629 IEM_MC_END();
12630 break;
12631
12632 case IEMMODE_32BIT:
12633 IEM_MC_BEGIN(0, 2);
12634 IEM_MC_LOCAL(uint32_t, u32Target);
12635 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12636 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12638 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12639 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
12640 IEM_MC_END();
12641 break;
12642
12643 case IEMMODE_64BIT:
12644 IEM_MC_BEGIN(0, 2);
12645 IEM_MC_LOCAL(uint64_t, u64Target);
12646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12649 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12650 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
12651 IEM_MC_END();
12652 break;
12653
12654 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12655 }
12656 }
12657}
12658
12659
12660/**
12661 * Opcode 0xff /5.
12662 * @param bRm The RM byte.
12663 */
12664FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
12665{
12666 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
12667 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
12668}
12669
12670
12671/**
12672 * Opcode 0xff /6.
12673 * @param bRm The RM byte.
12674 */
12675FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
12676{
12677 IEMOP_MNEMONIC(push_Ev, "push Ev");
12678
12679 /* Registers are handled by a common worker. */
12680 if (IEM_IS_MODRM_REG_MODE(bRm))
12681 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
12682
12683 /* Memory we do here. */
12684 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12685 switch (pVCpu->iem.s.enmEffOpSize)
12686 {
12687 case IEMMODE_16BIT:
12688 IEM_MC_BEGIN(0, 2);
12689 IEM_MC_LOCAL(uint16_t, u16Src);
12690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12691 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12692 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12693 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12694 IEM_MC_PUSH_U16(u16Src);
12695 IEM_MC_ADVANCE_RIP_AND_FINISH();
12696 IEM_MC_END();
12697 break;
12698
12699 case IEMMODE_32BIT:
12700 IEM_MC_BEGIN(0, 2);
12701 IEM_MC_LOCAL(uint32_t, u32Src);
12702 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12703 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12705 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12706 IEM_MC_PUSH_U32(u32Src);
12707 IEM_MC_ADVANCE_RIP_AND_FINISH();
12708 IEM_MC_END();
12709 break;
12710
12711 case IEMMODE_64BIT:
12712 IEM_MC_BEGIN(0, 2);
12713 IEM_MC_LOCAL(uint64_t, u64Src);
12714 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12715 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12717 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12718 IEM_MC_PUSH_U64(u64Src);
12719 IEM_MC_ADVANCE_RIP_AND_FINISH();
12720 IEM_MC_END();
12721 break;
12722
12723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12724 }
12725}
12726
12727
12728/**
12729 * @opcode 0xff
12730 */
12731FNIEMOP_DEF(iemOp_Grp5)
12732{
12733 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12734 switch (IEM_GET_MODRM_REG_8(bRm))
12735 {
12736 case 0:
12737 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
12738 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
12739 case 1:
12740 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
12741 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
12742 case 2:
12743 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
12744 case 3:
12745 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
12746 case 4:
12747 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
12748 case 5:
12749 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
12750 case 6:
12751 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
12752 case 7:
12753 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
12754 return IEMOP_RAISE_INVALID_OPCODE();
12755 }
12756 AssertFailedReturn(VERR_IEM_IPE_3);
12757}
12758
12759
12760
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed directly by the first opcode byte; entry order is the contract and
 * must not be changed.  Declared extern near the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
12828
12829
12830/** @} */
12831
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette