VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 102029

Last change on this file since 102029 was 102029, checked in by vboxsync, 17 months ago

VMM/IEM: Added more variable checks to tstIEMCheckMc and found 4 GCPtrMem variables that weren't declared as IEM_MC_LOCAL. bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 531.3 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 102029 2023-11-09 12:34:40Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
62 *
63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * @param a_fnNormalU8 Assembly worker for the register and non-locked memory
 * forms, invoked via IEM_MC_CALL_VOID_AIMPL_3 with
 * (pu8Dst, u8Src, pEFlags).
 *
 * Note! Deliberately leaves two scopes open (the memory 'else' at the end and
 * its enclosing block); the companion _NO_LOCK/_LOCKED macro closes them,
 * which is also why this ends with the odd-looking (void)0 so the caller
 * supplies the terminating semicolon.
64 */
65#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
66 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
67 \
68 /* \
69 * If rm is denoting a register, no more instruction bytes. \
70 */ \
71 if (IEM_IS_MODRM_REG_MODE(bRm)) \
72 { \
73 IEM_MC_BEGIN(3, 0, 0, 0); \
74 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
75 IEM_MC_ARG(uint8_t, u8Src, 1); \
76 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
77 \
78 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
79 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
80 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
81 IEM_MC_REF_EFLAGS(pEFlags); \
82 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
83 \
84 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
85 IEM_MC_END(); \
86 } \
87 else \
88 { \
89 /* \
90 * We're accessing memory. \
91 * Note! We're putting the eflags on the stack here so we can commit them \
92 * after the memory. \
93 */ \
94 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
95 { \
96 IEM_MC_BEGIN(3, 3, 0, 0); \
97 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
98 IEM_MC_ARG(uint8_t, u8Src, 1); \
99 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
101 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
102 \
103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
104 IEMOP_HLP_DONE_DECODING(); \
105 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
106 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
107 IEM_MC_FETCH_EFLAGS(EFlags); \
108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
109 \
110 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
111 IEM_MC_COMMIT_EFLAGS(EFlags); \
112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
113 IEM_MC_END(); \
114 } \
115 else \
116 { \
117 (void)0 /* Continued by IEMOP_BODY_BINARY_rm_r8_NO_LOCK or _LOCKED. */
118
119/**
120 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
121 * operands.
122 *
123 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * @param a_fnNormalU8 Assembly worker, invoked via IEM_MC_CALL_VOID_AIMPL_3
 * with (pu8Dst, u8Src, pEFlags).
 *
 * Same structure as IEMOP_BODY_BINARY_rm_r8_RW, but the destination is mapped
 * read-only (const pointer, MEM_MAP_U8_RO / COMMIT_AND_UNMAP_RO) since these
 * instructions only update EFLAGS, never the r/m operand.
 *
 * Note! Leaves two scopes open for the companion _NO_LOCK/_LOCKED macro to
 * close; hence the trailing (void)0.
124 */
125#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
127 \
128 /* \
129 * If rm is denoting a register, no more instruction bytes. \
130 */ \
131 if (IEM_IS_MODRM_REG_MODE(bRm)) \
132 { \
133 IEM_MC_BEGIN(3, 0, 0, 0); \
134 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
135 IEM_MC_ARG(uint8_t, u8Src, 1); \
136 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
137 \
138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
139 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
140 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
141 IEM_MC_REF_EFLAGS(pEFlags); \
142 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
143 \
144 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
145 IEM_MC_END(); \
146 } \
147 else \
148 { \
149 /* \
150 * We're accessing memory. \
151 * Note! We're putting the eflags on the stack here so we can commit them \
152 * after the memory. \
153 */ \
154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
155 { \
156 IEM_MC_BEGIN(3, 3, 0, 0); \
157 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
158 IEM_MC_ARG(uint8_t, u8Src, 1); \
159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
161 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
162 \
163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
164 IEMOP_HLP_DONE_DECODING(); \
165 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
166 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
167 IEM_MC_FETCH_EFLAGS(EFlags); \
168 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
169 \
170 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
171 IEM_MC_COMMIT_EFLAGS(EFlags); \
172 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
173 IEM_MC_END(); \
174 } \
175 else \
176 { \
177 (void)0 /* Continued by IEMOP_BODY_BINARY_rm_r8_NO_LOCK or _LOCKED. */
178
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW/RO when the instruction does not allow
 * the LOCK prefix: raises the invalid-lock-prefix exception and closes the
 * two scopes the body macro left open.
 */
179#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
180 IEMOP_HLP_DONE_DECODING(); \
181 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
182 } \
183 } \
184 (void)0
185
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW when the instruction allows the LOCK
 * prefix: emits the locked memory variant and closes the two scopes the body
 * macro left open.
 *
 * @param a_fnLockedU8 Assembly worker for the locked memory form, invoked
 * via IEM_MC_CALL_VOID_AIMPL_3 with (pu8Dst, u8Src, pEFlags).
 *
 * Note: local renamed bMapInfoDst -> bUnmapInfo for consistency with every
 * sibling body macro in this file (no functional change).
 */
186#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
187 IEM_MC_BEGIN(3, 3, 0, 0); \
188 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
189 IEM_MC_ARG(uint8_t, u8Src, 1); \
190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
192 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
193 \
194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
195 IEMOP_HLP_DONE_DECODING(); \
196 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
197 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
198 IEM_MC_FETCH_EFLAGS(EFlags); \
199 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
200 \
201 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
202 IEM_MC_COMMIT_EFLAGS(EFlags); \
203 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
204 IEM_MC_END(); \
205 } \
206 } \
207 (void)0
208
209/**
210 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
211 * destination.
 *
 * @param a_fnNormalU8 Assembly worker, invoked via IEM_MC_CALL_VOID_AIMPL_3
 * with (pu8Dst, u8Src, pEFlags).
 *
 * Unlike the rm_r8 bodies, this macro is self-contained (closes all its
 * scopes): the destination is always a register, so there is no locked
 * memory variant and LOCK is rejected in both paths.
212 */
213#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
215 \
216 /* \
217 * If rm is denoting a register, no more instruction bytes. \
218 */ \
219 if (IEM_IS_MODRM_REG_MODE(bRm)) \
220 { \
221 IEM_MC_BEGIN(3, 0, 0, 0); \
222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
223 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
224 IEM_MC_ARG(uint8_t, u8Src, 1); \
225 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
226 \
227 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
228 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
229 IEM_MC_REF_EFLAGS(pEFlags); \
230 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
231 \
232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
233 IEM_MC_END(); \
234 } \
235 else \
236 { \
237 /* \
238 * We're accessing memory. \
239 */ \
240 IEM_MC_BEGIN(3, 1, 0, 0); \
241 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
242 IEM_MC_ARG(uint8_t, u8Src, 1); \
243 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
245 \
246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
248 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
249 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
250 IEM_MC_REF_EFLAGS(pEFlags); \
251 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
252 \
253 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
254 IEM_MC_END(); \
255 } \
256 (void)0
257
258
259/**
260 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
261 * memory/register as the destination.
 *
 * @param a_fnNormalU16 Assembly worker for the 16-bit operand size.
 * @param a_fnNormalU32 Assembly worker for the 32-bit operand size.
 * @param a_fnNormalU64 Assembly worker for the 64-bit operand size.
 *
 * Each worker is invoked via IEM_MC_CALL_VOID_AIMPL_3 with
 * (puDst, uSrc, pEFlags), switching on pVCpu->iem.s.enmEffOpSize.
 *
 * Note! Leaves two scopes open (the LOCK 'else' and its enclosing block);
 * they are closed by IEMOP_BODY_BINARY_rm_rv_LOCKED, hence the trailing
 * (void)0 so the caller supplies the semicolon.
262 */
263#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
265 \
266 /* \
267 * If rm is denoting a register, no more instruction bytes. \
268 */ \
269 if (IEM_IS_MODRM_REG_MODE(bRm)) \
270 { \
271 switch (pVCpu->iem.s.enmEffOpSize) \
272 { \
273 case IEMMODE_16BIT: \
274 IEM_MC_BEGIN(3, 0, 0, 0); \
275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
276 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
277 IEM_MC_ARG(uint16_t, u16Src, 1); \
278 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
279 \
280 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
281 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
282 IEM_MC_REF_EFLAGS(pEFlags); \
283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
284 \
285 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
286 IEM_MC_END(); \
287 break; \
288 \
289 case IEMMODE_32BIT: \
290 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
292 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
293 IEM_MC_ARG(uint32_t, u32Src, 1); \
294 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
295 \
296 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
297 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
298 IEM_MC_REF_EFLAGS(pEFlags); \
299 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
300 \
301 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); /* 32-bit GPR write zeroes bits 63:32 */ \
302 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
303 IEM_MC_END(); \
304 break; \
305 \
306 case IEMMODE_64BIT: \
307 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
309 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
310 IEM_MC_ARG(uint64_t, u64Src, 1); \
311 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
312 \
313 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
314 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
315 IEM_MC_REF_EFLAGS(pEFlags); \
316 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
317 \
318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
319 IEM_MC_END(); \
320 break; \
321 \
322 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
323 } \
324 } \
325 else \
326 { \
327 /* \
328 * We're accessing memory. \
329 * Note! We're putting the eflags on the stack here so we can commit them \
330 * after the memory. \
331 */ \
332 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
333 { \
334 switch (pVCpu->iem.s.enmEffOpSize) \
335 { \
336 case IEMMODE_16BIT: \
337 IEM_MC_BEGIN(3, 3, 0, 0); \
338 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
339 IEM_MC_ARG(uint16_t, u16Src, 1); \
340 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
342 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
343 \
344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
345 IEMOP_HLP_DONE_DECODING(); \
346 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
347 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
348 IEM_MC_FETCH_EFLAGS(EFlags); \
349 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
350 \
351 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
352 IEM_MC_COMMIT_EFLAGS(EFlags); \
353 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
354 IEM_MC_END(); \
355 break; \
356 \
357 case IEMMODE_32BIT: \
358 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
359 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
360 IEM_MC_ARG(uint32_t, u32Src, 1); \
361 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
363 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
364 \
365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
366 IEMOP_HLP_DONE_DECODING(); \
367 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
368 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
369 IEM_MC_FETCH_EFLAGS(EFlags); \
370 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
371 \
372 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
373 IEM_MC_COMMIT_EFLAGS(EFlags); \
374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
375 IEM_MC_END(); \
376 break; \
377 \
378 case IEMMODE_64BIT: \
379 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
380 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
381 IEM_MC_ARG(uint64_t, u64Src, 1); \
382 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
384 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
385 \
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
387 IEMOP_HLP_DONE_DECODING(); \
388 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
389 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
390 IEM_MC_FETCH_EFLAGS(EFlags); \
391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
392 \
393 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
394 IEM_MC_COMMIT_EFLAGS(EFlags); \
395 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
396 IEM_MC_END(); \
397 break; \
398 \
399 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
400 } \
401 } \
402 else \
403 { \
404 (void)0 /* Continued by IEMOP_BODY_BINARY_rm_rv_LOCKED. */
405/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv_RW when the instruction allows the LOCK
 * prefix: emits the locked memory variant per operand size and closes the
 * two scopes the body macro left open.
 *
 * @param a_fnLockedU16 Locked assembly worker, 16-bit operand size.
 * @param a_fnLockedU32 Locked assembly worker, 32-bit operand size.
 * @param a_fnLockedU64 Locked assembly worker, 64-bit operand size.
 */
406#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
407 switch (pVCpu->iem.s.enmEffOpSize) \
408 { \
409 case IEMMODE_16BIT: \
410 IEM_MC_BEGIN(3, 3, 0, 0); \
411 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
412 IEM_MC_ARG(uint16_t, u16Src, 1); \
413 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
415 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
416 \
417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
418 IEMOP_HLP_DONE_DECODING(); \
419 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
420 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
421 IEM_MC_FETCH_EFLAGS(EFlags); \
422 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
423 \
424 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
425 IEM_MC_COMMIT_EFLAGS(EFlags); \
426 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
427 IEM_MC_END(); \
428 break; \
429 \
430 case IEMMODE_32BIT: \
431 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
432 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
433 IEM_MC_ARG(uint32_t, u32Src, 1); \
434 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
436 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
437 \
438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
439 IEMOP_HLP_DONE_DECODING(); \
440 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
441 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
442 IEM_MC_FETCH_EFLAGS(EFlags); \
443 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
444 \
445 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
446 IEM_MC_COMMIT_EFLAGS(EFlags); \
447 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
448 IEM_MC_END(); \
449 break; \
450 \
451 case IEMMODE_64BIT: \
452 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
453 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
454 IEM_MC_ARG(uint64_t, u64Src, 1); \
455 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
457 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
458 \
459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
460 IEMOP_HLP_DONE_DECODING(); \
461 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
462 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
463 IEM_MC_FETCH_EFLAGS(EFlags); \
464 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
465 \
466 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
467 IEM_MC_COMMIT_EFLAGS(EFlags); \
468 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
469 IEM_MC_END(); \
470 break; \
471 \
472 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
473 } \
474 } \
475 } \
476 (void)0
477
478/**
479 * Body for read-only word/dword/qword instructions like TEST and CMP with
480 * memory/register as the destination.
 *
 * @param a_fnNormalU16 Assembly worker, 16-bit operand size.
 * @param a_fnNormalU32 Assembly worker, 32-bit operand size.
 * @param a_fnNormalU64 Assembly worker, 64-bit operand size.
 *
 * Unlike the _RW variant this macro is self-contained: the destination is
 * mapped read-only (only EFLAGS are updated) and the LOCK-prefix branch
 * raises the invalid-lock-prefix exception itself instead of being continued
 * by a _LOCKED tail macro.
481 */
482#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
484 \
485 /* \
486 * If rm is denoting a register, no more instruction bytes. \
487 */ \
488 if (IEM_IS_MODRM_REG_MODE(bRm)) \
489 { \
490 switch (pVCpu->iem.s.enmEffOpSize) \
491 { \
492 case IEMMODE_16BIT: \
493 IEM_MC_BEGIN(3, 0, 0, 0); \
494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
495 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
496 IEM_MC_ARG(uint16_t, u16Src, 1); \
497 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
498 \
499 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
500 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
501 IEM_MC_REF_EFLAGS(pEFlags); \
502 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
503 \
504 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
505 IEM_MC_END(); \
506 break; \
507 \
508 case IEMMODE_32BIT: \
509 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
511 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
512 IEM_MC_ARG(uint32_t, u32Src, 1); \
513 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
514 \
515 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
516 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
517 IEM_MC_REF_EFLAGS(pEFlags); \
518 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
519 \
520 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
521 IEM_MC_END(); \
522 break; \
523 \
524 case IEMMODE_64BIT: \
525 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
527 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
528 IEM_MC_ARG(uint64_t, u64Src, 1); \
529 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
530 \
531 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
532 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
533 IEM_MC_REF_EFLAGS(pEFlags); \
534 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
535 \
536 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
537 IEM_MC_END(); \
538 break; \
539 \
540 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
541 } \
542 } \
543 else \
544 { \
545 /* \
546 * We're accessing memory. \
547 * Note! We're putting the eflags on the stack here so we can commit them \
548 * after the memory. \
549 */ \
550 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
551 { \
552 switch (pVCpu->iem.s.enmEffOpSize) \
553 { \
554 case IEMMODE_16BIT: \
555 IEM_MC_BEGIN(3, 3, 0, 0); \
556 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
557 IEM_MC_ARG(uint16_t, u16Src, 1); \
558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
560 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
561 \
562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
563 IEMOP_HLP_DONE_DECODING(); \
564 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
565 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
566 IEM_MC_FETCH_EFLAGS(EFlags); \
567 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
568 \
569 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
570 IEM_MC_COMMIT_EFLAGS(EFlags); \
571 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
572 IEM_MC_END(); \
573 break; \
574 \
575 case IEMMODE_32BIT: \
576 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
577 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
578 IEM_MC_ARG(uint32_t, u32Src, 1); \
579 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
581 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
582 \
583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
584 IEMOP_HLP_DONE_DECODING(); \
585 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
586 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
587 IEM_MC_FETCH_EFLAGS(EFlags); \
588 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
589 \
590 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
591 IEM_MC_COMMIT_EFLAGS(EFlags); \
592 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
593 IEM_MC_END(); \
594 break; \
595 \
596 case IEMMODE_64BIT: \
597 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
598 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
599 IEM_MC_ARG(uint64_t, u64Src, 1); \
600 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
602 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
603 \
604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
605 IEMOP_HLP_DONE_DECODING(); \
606 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
607 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
608 IEM_MC_FETCH_EFLAGS(EFlags); \
609 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
610 \
611 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
612 IEM_MC_COMMIT_EFLAGS(EFlags); \
613 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
614 IEM_MC_END(); \
615 break; \
616 \
617 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
618 } \
619 } \
620 else \
621 { \
622 IEMOP_HLP_DONE_DECODING(); \
623 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
624 } \
625 } \
626 (void)0
627
628
629/**
630 * Body for instructions like ADD, AND, OR, ++ with working on AL with
631 * a byte immediate.
 *
 * @param a_fnNormalU8 Assembly worker, invoked via IEM_MC_CALL_VOID_AIMPL_3
 * with (pu8Dst, u8Src, pEFlags); pu8Dst references AL.
 *
 * Note! Ends inside IEM_MC_END() without the usual (void)0; the caller's
 * trailing semicolon completes the statement.
632 */
633#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
634 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
635 \
636 IEM_MC_BEGIN(3, 0, 0, 0); \
637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
638 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
639 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
640 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
641 \
642 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
643 IEM_MC_REF_EFLAGS(pEFlags); \
644 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
645 \
646 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
647 IEM_MC_END()
648
649/**
650 * Body for instructions like ADD, AND, OR, ++ with working on
651 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param a_fnNormalU16 Assembly worker, 16-bit operand size.
 * @param a_fnNormalU32 Assembly worker, 32-bit operand size.
 * @param a_fnNormalU64 Assembly worker, 64-bit operand size (immediate is
 * sign-extended from 32 bits, see GET_NEXT_S32_SX_U64).
 * @param a_fModifiesDstReg Non-zero when the instruction writes the
 * destination register; gates the clearing of
 * RAX[63:32] in the 32-bit case (TEST/CMP pass 0).
 *
 * NOTE(review): the case blocks here have no 'break' after IEM_MC_END(),
 * unlike the other body macros in this file — presumably IEM_MC_END()
 * terminates the block (returns); confirm against the IEM_MC definitions.
652 */
653#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
654 switch (pVCpu->iem.s.enmEffOpSize) \
655 { \
656 case IEMMODE_16BIT: \
657 { \
658 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
659 \
660 IEM_MC_BEGIN(3, 0, 0, 0); \
661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
662 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
663 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
664 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
665 \
666 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
667 IEM_MC_REF_EFLAGS(pEFlags); \
668 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
669 \
670 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
671 IEM_MC_END(); \
672 } \
673 \
674 case IEMMODE_32BIT: \
675 { \
676 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
677 \
678 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
680 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
681 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
682 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
683 \
684 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
685 IEM_MC_REF_EFLAGS(pEFlags); \
686 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
687 \
688 if (a_fModifiesDstReg) \
689 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
690 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
691 IEM_MC_END(); \
692 } \
693 \
694 case IEMMODE_64BIT: \
695 { \
696 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
697 \
698 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
700 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
701 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
702 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
703 \
704 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
705 IEM_MC_REF_EFLAGS(pEFlags); \
706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
707 \
708 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
709 IEM_MC_END(); \
710 } \
711 \
712 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
713 } \
714 (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
736FNIEMOP_DEF(iemOp_add_Eb_Gb)
737{
 /* ADD r/m8, r8 — the _RW body opens scopes that the _LOCKED tail closes,
 so the two macro invocations below form one inseparable pair. */
738 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
739 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_add_u8);
740 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
741}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
753FNIEMOP_DEF(iemOp_add_Ev_Gv)
754{
 /* ADD r/m16/32/64, r16/32/64 — _RW body + _LOCKED tail form one pair. */
755 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
756 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
757 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
758}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
767FNIEMOP_DEF(iemOp_add_Gb_Eb)
768{
 /* ADD r8, r/m8 — register destination, so no locked variant. */
769 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
770 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
771}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
780FNIEMOP_DEF(iemOp_add_Gv_Ev)
781{
 /* ADD r16/32/64, r/m16/32/64 — register destination, so no locked variant. */
782 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
783 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
784}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
793FNIEMOP_DEF(iemOp_add_Al_Ib)
794{
 /* ADD AL, imm8 */
795 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
796 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
797}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
809FNIEMOP_DEF(iemOp_add_eAX_Iz)
810{
 /* ADD rAX, imm16/32 — final '1': ADD writes rAX, so the 32-bit case must
 clear the high half of RAX (a_fModifiesDstReg). */
811 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
812 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
813}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
819 */
820FNIEMOP_DEF(iemOp_push_ES)
821{
 /* PUSH ES — invalid outside 16/32-bit modes (IEMOP_HLP_NO_64BIT);
 shares the common segment-register push helper. */
822 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
823 IEMOP_HLP_NO_64BIT();
824 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
825}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
831 */
832FNIEMOP_DEF(iemOp_pop_ES)
833{
 /* POP ES — invalid in 64-bit mode; deferred to the C implementation.
 NOTE(review): the RT_BIT_64 mask appears to list the guest registers the
 cImpl may modify (rSP plus the ES selector/base/limit) — presumably for
 recompiler state tracking; confirm against IEM_MC_DEFER_TO_CIMPL_2_RET. */
834 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
835 IEMOP_HLP_NO_64BIT();
836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
837 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
839 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
840 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
841 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES),
842 iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
843}
844
845
846/**
847 * @opcode 0x08
848 * @opgroup og_gen_arith_bin
849 * @opflmodify cf,pf,af,zf,sf,of
850 * @opflundef af
851 * @opflclear of,cf
852 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
853 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
854 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
855 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
856 */
857FNIEMOP_DEF(iemOp_or_Eb_Gb)
858{
 /* OR r/m8, r8 — AF is undefined for OR; _RW body + _LOCKED tail pair. */
859 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
860 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
861 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_or_u8);
862 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
863}
864
865
866/**
867 * @opcode 0x09
868 * @opgroup og_gen_arith_bin
869 * @opflmodify cf,pf,af,zf,sf,of
870 * @opflundef af
871 * @opflclear of,cf
872 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
873 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
874 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
875 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
876 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
877 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
878 */
879FNIEMOP_DEF(iemOp_or_Ev_Gv)
880{
 /* OR r/m16/32/64, r16/32/64 — AF undefined; _RW body + _LOCKED tail pair. */
881 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
882 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
883 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
884 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
885}
886
887
888/**
889 * @opcode 0x0a
890 * @opgroup og_gen_arith_bin
891 * @opflmodify cf,pf,af,zf,sf,of
892 * @opflundef af
893 * @opflclear of,cf
894 * @opcopytests iemOp_or_Eb_Gb
895 */
896FNIEMOP_DEF(iemOp_or_Gb_Eb)
897{
 /* OR r8, r/m8 — register destination, so no locked variant; AF undefined. */
898 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
899 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
900 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
901}
902
903
904/**
905 * @opcode 0x0b
906 * @opgroup og_gen_arith_bin
907 * @opflmodify cf,pf,af,zf,sf,of
908 * @opflundef af
909 * @opflclear of,cf
910 * @opcopytests iemOp_or_Ev_Gv
911 */
912FNIEMOP_DEF(iemOp_or_Gv_Ev)
913{
 /* OR r16/32/64, r/m16/32/64 — register destination; AF undefined. */
914 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
915 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
916 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
917}
918
919
920/**
921 * @opcode 0x0c
922 * @opgroup og_gen_arith_bin
923 * @opflmodify cf,pf,af,zf,sf,of
924 * @opflundef af
925 * @opflclear of,cf
926 * @opcopytests iemOp_or_Eb_Gb
927 */
928FNIEMOP_DEF(iemOp_or_Al_Ib)
929{
 /* OR AL, imm8 — AF undefined. */
930 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
932 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
933}
934
935
936/**
937 * @opcode 0x0d
938 * @opgroup og_gen_arith_bin
939 * @opflmodify cf,pf,af,zf,sf,of
940 * @opflundef af
941 * @opflclear of,cf
942 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
943 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
944 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
945 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
946 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
947 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
948 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
949 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,immz - operand-size sensitive (16/32/64-bit worker selection). */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
956
957
958/**
959 * @opcode 0x0e
960 * @opgroup og_stack_sreg
961 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - invalid in 64-bit mode; shares the common sreg-push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
968
969
970/**
971 * @opcode 0x0f
972 * @opmnemonic EscTwo0f
973 * @openc two0f
974 * @opdisenum OP_2B_ESC
975 * @ophints harmless
976 * @opgroup og_escapes
977 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
    /* 0x0f: escape to the two-byte opcode map on 286+ targets; on pre-286
       targets this byte decodes as POP CS instead (see below). */
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The two-byte map has 4 entries per opcode, indexed by the active prefix. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1015
1016/**
1017 * @opcode 0x10
1018 * @opgroup og_gen_arith_bin
1019 * @opfltest cf
1020 * @opflmodify cf,pf,af,zf,sf,of
1021 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1022 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1023 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1024 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1025 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1026 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8,r8 - memory-destination form; separate locked worker backs the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1033
1034
1035/**
1036 * @opcode 0x11
1037 * @opgroup og_gen_arith_bin
1038 * @opfltest cf
1039 * @opflmodify cf,pf,af,zf,sf,of
1040 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1041 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1042 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1043 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1044 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1045 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv - memory-destination form with locked variants per operand size. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1052
1053
1054/**
1055 * @opcode 0x12
1056 * @opgroup og_gen_arith_bin
1057 * @opfltest cf
1058 * @opflmodify cf,pf,af,zf,sf,of
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8,r/m8 - register-destination form; no LOCK. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1066
1067
1068/**
1069 * @opcode 0x13
1070 * @opgroup og_gen_arith_bin
1071 * @opfltest cf
1072 * @opflmodify cf,pf,af,zf,sf,of
1073 * @opcopytests iemOp_adc_Ev_Gv
1074 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev - register-destination form. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1080
1081
1082/**
1083 * @opcode 0x14
1084 * @opgroup og_gen_arith_bin
1085 * @opfltest cf
1086 * @opflmodify cf,pf,af,zf,sf,of
1087 * @opcopytests iemOp_adc_Eb_Gb
1088 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,imm8 - fixed 8-bit form. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1094
1095
1096/**
1097 * @opcode 0x15
1098 * @opgroup og_gen_arith_bin
1099 * @opfltest cf
1100 * @opflmodify cf,pf,af,zf,sf,of
1101 * @opcopytests iemOp_adc_Ev_Gv
1102 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,immz - operand-size sensitive. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1108
1109
1110/**
1111 * @opcode 0x16
1112 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode; shares the common sreg-push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1119
1120
1121/**
1122 * @opcode 0x17
1123 * @opgroup og_gen_arith_bin
1124 * @opfltest cf
1125 * @opflmodify cf,pf,af,zf,sf,of
1126 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - defers to iemCImpl_pop_Sreg.  IEM_CIMPL_F_INHIBIT_SHADOW models
       the interrupt-shadow window after loading SS (DISOPTYPE_INHIBIT_IRQS). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1139
1140
1141/**
1142 * @opcode 0x18
1143 * @opgroup og_gen_arith_bin
1144 * @opfltest cf
1145 * @opflmodify cf,pf,af,zf,sf,of
1146 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8,r8 - memory-destination form; separate locked worker backs the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1153
1154
1155/**
1156 * @opcode 0x19
1157 * @opgroup og_gen_arith_bin
1158 * @opfltest cf
1159 * @opflmodify cf,pf,af,zf,sf,of
1160 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv - memory-destination form with locked variants per operand size. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1167
1168
1169/**
1170 * @opcode 0x1a
1171 * @opgroup og_gen_arith_bin
1172 * @opfltest cf
1173 * @opflmodify cf,pf,af,zf,sf,of
1174 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8,r/m8 - register-destination form; no LOCK. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1180
1181
1182/**
1183 * @opcode 0x1b
1184 * @opgroup og_gen_arith_bin
1185 * @opfltest cf
1186 * @opflmodify cf,pf,af,zf,sf,of
1187 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev - register-destination form. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1193
1194
1195/**
1196 * @opcode 0x1c
1197 * @opgroup og_gen_arith_bin
1198 * @opfltest cf
1199 * @opflmodify cf,pf,af,zf,sf,of
1200 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,imm8 - fixed 8-bit form. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1206
1207
1208/**
1209 * @opcode 0x1d
1210 * @opgroup og_gen_arith_bin
1211 * @opfltest cf
1212 * @opflmodify cf,pf,af,zf,sf,of
1213 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,immz - operand-size sensitive. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1219
1220
1221/**
1222 * @opcode 0x1e
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode; shares the common sreg-push worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1231
1232
1233/**
1234 * @opcode 0x1f
1235 * @opgroup og_stack_sreg
1236 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - defers to iemCImpl_pop_Sreg; flags SP plus the DS selector,
       base and limit as modified guest registers. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1249
1250
1251/**
1252 * @opcode 0x20
1253 * @opgroup og_gen_arith_bin
1254 * @opflmodify cf,pf,af,zf,sf,of
1255 * @opflundef af
1256 * @opflclear of,cf
1257 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8,r8 - memory-destination form with locked variant; AF undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1265
1266
1267/**
1268 * @opcode 0x21
1269 * @opgroup og_gen_arith_bin
1270 * @opflmodify cf,pf,af,zf,sf,of
1271 * @opflundef af
1272 * @opflclear of,cf
1273 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv - memory-destination form with locked variants per operand size. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1281
1282
1283/**
1284 * @opcode 0x22
1285 * @opgroup og_gen_arith_bin
1286 * @opflmodify cf,pf,af,zf,sf,of
1287 * @opflundef af
1288 * @opflclear of,cf
1289 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8,r/m8 - register-destination form; no LOCK. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1296
1297
1298/**
1299 * @opcode 0x23
1300 * @opgroup og_gen_arith_bin
1301 * @opflmodify cf,pf,af,zf,sf,of
1302 * @opflundef af
1303 * @opflclear of,cf
1304 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev - register-destination form. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1311
1312
1313/**
1314 * @opcode 0x24
1315 * @opgroup og_gen_arith_bin
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef af
1318 * @opflclear of,cf
1319 */
1320FNIEMOP_DEF(iemOp_and_Al_Ib)
1321{
1322 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1323 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1324 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1325}
1326
1327
1328/**
1329 * @opcode 0x25
1330 * @opgroup og_gen_arith_bin
1331 * @opflmodify cf,pf,af,zf,sf,of
1332 * @opflundef af
1333 * @opflclear of,cf
1334 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,immz - operand-size sensitive. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1341
1342
1343/**
1344 * @opcode 0x26
1345 * @opmnemonic SEG
1346 * @op1 ES
1347 * @opgroup og_prefix
1348 * @openc prefix
1349 * @opdisenum OP_SEG
1350 * @ophints harmless
1351 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, set the effective segment,
       then continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1361
1362
1363/**
1364 * @opcode 0x27
1365 * @opfltest af,cf
1366 * @opflmodify cf,pf,af,zf,sf,of
1367 * @opflundef of
1368 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       OF is architecturally undefined; work is deferred to iemCImpl_daa. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1377
1378
1379/**
1380 * @opcode 0x28
1381 * @opgroup og_gen_arith_bin
1382 * @opflmodify cf,pf,af,zf,sf,of
1383 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8,r8 - memory-destination form; separate locked worker backs the LOCK prefix. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1390
1391
1392/**
1393 * @opcode 0x29
1394 * @opgroup og_gen_arith_bin
1395 * @opflmodify cf,pf,af,zf,sf,of
1396 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv - memory-destination form with locked variants per operand size. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1403
1404
1405/**
1406 * @opcode 0x2a
1407 * @opgroup og_gen_arith_bin
1408 * @opflmodify cf,pf,af,zf,sf,of
1409 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8,r/m8 - register-destination form; no LOCK. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1415
1416
1417/**
1418 * @opcode 0x2b
1419 * @opgroup og_gen_arith_bin
1420 * @opflmodify cf,pf,af,zf,sf,of
1421 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev - register-destination form. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1427
1428
1429/**
1430 * @opcode 0x2c
1431 * @opgroup og_gen_arith_bin
1432 * @opflmodify cf,pf,af,zf,sf,of
1433 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,imm8 - fixed 8-bit form. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1439
1440
1441/**
1442 * @opcode 0x2d
1443 * @opgroup og_gen_arith_bin
1444 * @opflmodify cf,pf,af,zf,sf,of
1445 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,immz - operand-size sensitive. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1451
1452
1453/**
1454 * @opcode 0x2e
1455 * @opmnemonic SEG
1456 * @op1 CS
1457 * @opgroup og_prefix
1458 * @openc prefix
1459 * @opdisenum OP_SEG
1460 * @ophints harmless
1461 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, set the effective segment,
       then continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1471
1472
1473/**
1474 * @opcode 0x2f
1475 * @opfltest af,cf
1476 * @opflmodify cf,pf,af,zf,sf,of
1477 * @opflundef of
1478 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
       OF is architecturally undefined; work is deferred to iemCImpl_das. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1487
1488
1489/**
1490 * @opcode 0x30
1491 * @opgroup og_gen_arith_bin
1492 * @opflmodify cf,pf,af,zf,sf,of
1493 * @opflundef af
1494 * @opflclear of,cf
1495 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8,r8 - memory-destination form with locked variant; AF undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1503
1504
1505/**
1506 * @opcode 0x31
1507 * @opgroup og_gen_arith_bin
1508 * @opflmodify cf,pf,af,zf,sf,of
1509 * @opflundef af
1510 * @opflclear of,cf
1511 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv - memory-destination form with locked variants per operand size. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1519
1520
1521/**
1522 * @opcode 0x32
1523 * @opgroup og_gen_arith_bin
1524 * @opflmodify cf,pf,af,zf,sf,of
1525 * @opflundef af
1526 * @opflclear of,cf
1527 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8,r/m8 - register-destination form; no LOCK. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1534
1535
1536/**
1537 * @opcode 0x33
1538 * @opgroup og_gen_arith_bin
1539 * @opflmodify cf,pf,af,zf,sf,of
1540 * @opflundef af
1541 * @opflclear of,cf
1542 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev - register-destination form. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1549
1550
1551/**
1552 * @opcode 0x34
1553 * @opgroup og_gen_arith_bin
1554 * @opflmodify cf,pf,af,zf,sf,of
1555 * @opflundef af
1556 * @opflclear of,cf
1557 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,imm8 - fixed 8-bit form. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1564
1565
1566/**
1567 * @opcode 0x35
1568 * @opgroup og_gen_arith_bin
1569 * @opflmodify cf,pf,af,zf,sf,of
1570 * @opflundef af
1571 * @opflclear of,cf
1572 */
1573FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1574{
1575 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1576 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1577 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1578}
1579
1580
1581/**
1582 * @opcode 0x36
1583 * @opmnemonic SEG
1584 * @op1 SS
1585 * @opgroup og_prefix
1586 * @openc prefix
1587 * @opdisenum OP_SEG
1588 * @ophints harmless
1589 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, set the effective segment,
       then continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1599
1600
1601/**
1602 * @opcode 0x37
1603 * @opfltest af,cf
1604 * @opflmodify cf,pf,af,zf,sf,of
1605 * @opflundef pf,zf,sf,of
1606 * @opgroup og_gen_arith_dec
1607 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1608 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1609 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1610 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1611 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1612 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1613 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1615 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1617 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1618 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1619 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1621 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1622 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1623 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1624 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1625 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1626 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1627 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1628 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1629 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1630 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1631 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1632 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1633 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1634 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1635 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1636 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1637 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1638 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA - ASCII adjust AL after addition; invalid in 64-bit mode.
       Work is deferred to iemCImpl_aaa; AX is the only GPR touched. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1648
1649
1650/**
1651 * @opcode 0x38
1652 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8,r8 - read-only body (no writeback); LOCK explicitly rejected. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1659
1660
1661/**
1662 * @opcode 0x39
1663 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv - read-only body, no locked variant. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1669
1670
1671/**
1672 * @opcode 0x3a
1673 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8,r/m8 - register-first operand order. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1679
1680
1681/**
1682 * @opcode 0x3b
1683 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev - note the '0' modify-flag (compare does not write the destination). */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1689
1690
1691/**
1692 * @opcode 0x3c
1693 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,imm8 - fixed 8-bit form. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1699
1700
1701/**
1702 * @opcode 0x3d
1703 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,immz - the trailing '0' marks it as non-modifying (compare only). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1709
1710
1711/**
1712 * @opcode 0x3e
1713 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, set the effective segment,
       then continue decoding with the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1723
1724
1725/**
1726 * @opcode 0x3f
1727 * @opfltest af,cf
1728 * @opflmodify cf,pf,af,zf,sf,of
1729 * @opflundef pf,zf,sf,of
1730 * @opgroup og_gen_arith_dec
1731 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1732 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1733 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1734 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1735 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1736 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1745 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1751 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1752 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1753 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1754 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1756 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1758 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1759 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1760 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1761 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1762 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1763 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1764 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1765 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1766 */
1767FNIEMOP_DEF(iemOp_aas)
1768{
1769 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1770 IEMOP_HLP_NO_64BIT();
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1772 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1773
1774 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1775}
1776
1777
1778/**
1779 * Common 'inc/dec register' helper.
1780 *
1781 * Not for 64-bit code, only for what became the rex prefixes.
1782 */
1783#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1784 switch (pVCpu->iem.s.enmEffOpSize) \
1785 { \
1786 case IEMMODE_16BIT: \
1787 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1789 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1790 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1791 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1792 IEM_MC_REF_EFLAGS(pEFlags); \
1793 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1795 IEM_MC_END(); \
1796 break; \
1797 \
1798 case IEMMODE_32BIT: \
1799 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1801 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1802 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1803 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1804 IEM_MC_REF_EFLAGS(pEFlags); \
1805 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1806 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
1807 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1808 IEM_MC_END(); \
1809 break; \
1810 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1811 } \
1812 (void)0
1813
1814/**
1815 * @opcode 0x40
1816 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Continue decoding with the next opcode byte. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1834
1835
1836/**
1837 * @opcode 0x41
1838 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* bit 3 of the B register index */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1857
1858
1859/**
1860 * @opcode 0x42
1861 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.X).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* bit 3 of the SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1880
1881
1882
1883/**
1884 * @opcode 0x43
1885 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.B + REX.X).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1905
1906
1907/**
1908 * @opcode 0x44
1909 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* bit 3 of the ModR/M reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1928
1929
1930/**
1931 * @opcode 0x45
1932 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode (REX.R + REX.B).
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1952
1953
1954/**
1955 * @opcode 0x46
1956 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x46 = REX.RX: sets both the R and X extension bits. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        /* Fetch the next opcode byte and restart decoding with the REX state applied. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is INC eSI. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1976
1977
1978/**
1979 * @opcode 0x47
1980 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x47 = REX.RBX: sets the R, B and X extension bits. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        /* Fetch the next opcode byte and restart decoding with the REX state applied. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is INC eDI. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2001
2002
2003/**
2004 * @opcode 0x48
2005 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x48 = REX.W: 64-bit operand size; effective operand size must be recalculated. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        /* Fetch the next opcode byte and restart decoding with the REX state applied. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is DEC eAX. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2024
2025
2026/**
2027 * @opcode 0x49
2028 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x49 = REX.BW: B extension bit plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch the next opcode byte and restart decoding with the REX state applied. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is DEC eCX. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2048
2049
2050/**
2051 * @opcode 0x4a
2052 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4a = REX.XW: X extension bit plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch the next opcode byte and restart decoding with the REX state applied. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is DEC eDX. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2072
2073
2074/**
2075 * @opcode 0x4b
2076 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4b = REX.BXW: B and X extension bits plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch the next opcode byte and restart decoding with the REX state applied. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is DEC eBX. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2097
2098
2099/**
2100 * @opcode 0x4c
2101 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4c = REX.RW: R extension bit plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch the next opcode byte and restart decoding with the REX state applied. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is DEC eSP. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2121
2122
2123/**
2124 * @opcode 0x4d
2125 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4d = REX.RBW: R and B extension bits plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch the next opcode byte and restart decoding with the REX state applied. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is DEC eBP. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2146
2147
2148/**
2149 * @opcode 0x4e
2150 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4e = REX.RXW: R and X extension bits plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch the next opcode byte and restart decoding with the REX state applied. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is DEC eSI. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2171
2172
2173/**
2174 * @opcode 0x4f
2175 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* 0x4f = REX.RBXW: all three extension bits plus 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        /* Fetch the next opcode byte and restart decoding with the REX state applied. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is DEC eDI. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2197
2198
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size becomes 64-bit; the 66h prefix then selects 16-bit (there is
 * no 32-bit push in 64-bit mode).
 *
 * @param   iReg    Index of the general register to push (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Fetch the register value and push it, sized by the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2246
2247
2248/**
2249 * @opcode 0x50
2250 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2256
2257
2258/**
2259 * @opcode 0x51
2260 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2266
2267
2268/**
2269 * @opcode 0x52
2270 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2276
2277
2278/**
2279 * @opcode 0x53
2280 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2286
2287
2288/**
2289 * @opcode 0x54
2290 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* 8086 quirk: 'push sp' stores the already decremented SP value (SP-2),
       whereas later CPUs store the value SP had before the push. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    /* All other CPUs use the common push path. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2307
2308
2309/**
2310 * @opcode 0x55
2311 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2317
2318
2319/**
2320 * @opcode 0x56
2321 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2327
2328
2329/**
2330 * @opcode 0x57
2331 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2337
2338
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size becomes 64-bit; the 66h prefix then selects 16-bit (there is
 * no 32-bit pop in 64-bit mode).
 *
 * Note: iemOp_pop_eSP only defers here when REX.B is set (i.e. 'pop r12');
 * the plain 'pop rSP' case is handled there instead.
 *
 * @param   iReg    Index of the general register to pop into (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Pop directly into a reference to the destination register. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64(iReg); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2387
2388
2389/**
2390 * @opcode 0x58
2391 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2397
2398
2399/**
2400 * @opcode 0x59
2401 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2407
2408
2409/**
2410 * @opcode 0x5a
2411 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2417
2418
2419/**
2420 * @opcode 0x5b
2421 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2427
2428
2429/**
2430 * @opcode 0x5c
2431 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* With REX.B this is 'pop r12', which the common helper handles fine. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* 'pop rSP' itself is special: the value read from the stack must become
       the new stack pointer, so pop into a local and store it afterwards
       rather than popping through a register reference. */
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2482
2483
2484/**
2485 * @opcode 0x5d
2486 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2492
2493
2494/**
2495 * @opcode 0x5e
2496 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2502
2503
2504/**
2505 * @opcode 0x5f
2506 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* The common helper deals with operand size and the REX.B register extension. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2512
2513
2514/**
2515 * @opcode 0x60
2516 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();   /* PUSHA/PUSHAD is invalid in 64-bit mode. */
    /* Defer to the C implementation; the second argument appears to be the
       mask of guest registers the call modifies (only SP here, the others
       are merely read) -- NOTE(review): confirm against the macro docs. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2527
2528
2529/**
2530 * @opcode 0x61
2531 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode opcode 0x61 is POPA/POPAD. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* Defer to the C implementation; the register mask lists all eight
           GPRs since POPA loads them all (SP via the pops themselves). */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    /* In 64-bit mode 0x61 would be the MVEX prefix (Knights Corner), which
       is not supported -> invalid opcode. */
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2566
2567
2568/**
2569 * @opcode 0x62
2570 * @opmnemonic bound
2571 * @op1 Gv_RO
2572 * @op2 Ma
2573 * @opmincpu 80186
2574 * @ophints harmless x86_invalid_64
2575 * @optest op1=0 op2=0 ->
2576 * @optest op1=1 op2=0 -> value.xcpt=5
2577 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2578 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2579 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2580 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2581 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2582 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2583 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2584 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2585 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2586 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2587 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2588 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2589 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2590 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2591 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2592 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2593 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2594 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2595 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2596 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2597 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2598 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2599 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2600 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2601 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2602 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2603 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2604 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2605 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2606 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2607 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2608 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2609 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2610 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2611 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2612 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2613 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2614 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2615 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2616 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2617 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2618 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2619 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             * whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                /* 16-bit: index register checked against two words at [mem] and [mem+2]. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                /* 32-bit: index register checked against two dwords at [mem] and [mem+4]. */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* Reaching this point means MODRM.MOD == 3, i.e. a possible EVEX prefix. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         * does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix: consume the remaining two payload bytes, then bail --
       EVEX decoding is not implemented. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2707
2708
/** Opcode 0x63 - non-64-bit modes (in 64-bit mode 0x63 decodes as MOVSXD, see
 *  iemOp_movsxd_Gv_Ev). ARPL adjusts the RPL field of the destination selector. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* ARPL is a protected-mode only instruction. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operate directly on a register reference. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, apply ARPL, then commit
           both the memory write and the resulting EFLAGS. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2757
2758
2759/**
2760 * @opcode 0x63
2761 *
2762 * @note This is a weird one. It works like a regular move instruction if
2763 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2764 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Only the REX.W form (sign-extend 32-bit source into 64-bit reg) is handled. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* Without REX.W, AMD docs say this works like a regular mov -- not
           implemented here yet (see the @note/@todo in the function docs). */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2806
2807
2808/**
2809 * @opcode 0x64
2810 * @opmnemonic segfs
2811 * @opmincpu 80386
2812 * @opgroup og_prefixes
2813 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix: record it and continue decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    /* Fetch the next opcode byte and restart decoding with FS as effective segment. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2825
2826
2827/**
2828 * @opcode 0x65
2829 * @opmnemonic seggs
2830 * @opmincpu 80386
2831 * @opgroup og_prefixes
2832 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    /* GS segment-override prefix: record it and continue decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    /* Fetch the next opcode byte and restart decoding with GS as effective segment. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2844
2845
2846/**
2847 * @opcode 0x66
2848 * @opmnemonic opsize
2849 * @openc prefix
2850 * @opmincpu 80386
2851 * @ophints harmless
2852 * @opgroup og_prefixes
2853 */
FNIEMOP_DEF(iemOp_op_size)
{
    /* Operand-size override prefix (66h): toggle the effective operand size. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Fetch the next opcode byte and restart decoding with the prefix applied. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2870
2871
2872/**
2873 * @opcode 0x67
2874 * @opmnemonic addrsize
2875 * @openc prefix
2876 * @opmincpu 80386
2877 * @ophints harmless
2878 * @opgroup og_prefixes
2879 */
FNIEMOP_DEF(iemOp_addr_size)
{
    /* Address-size override prefix (67h): toggle the effective address mode. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        /* In 64-bit mode the prefix selects 32-bit addressing (never 16-bit). */
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Fetch the next opcode byte and restart decoding with the prefix applied. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2897
2898
2899/**
2900 * @opcode 0x68
2901 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Immediate is word/dword sized; for 64-bit operand size a dword immediate
       is sign-extended to 64 bits. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2939
2940
2941/**
2942 * @opcode 0x69
 *
 * imul Gv,Ev,Iz - three-operand signed multiply: Gv = Ev * Iz, truncated to
 * the effective operand size.  SF, ZF, AF and PF are declared undefined (see
 * the IEMOP_VERIFICATION_UNDEFINED_EFLAGS call below); the remaining flag
 * behavior comes from the CPU-behavior-selected assembly helper.
2943 */
2944FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2945{
2946    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2947    IEMOP_HLP_MIN_186();
2948    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2949    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2950
2951    switch (pVCpu->iem.s.enmEffOpSize)
2952    {
2953        case IEMMODE_16BIT:
2954        {
2955            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2956            if (IEM_IS_MODRM_REG_MODE(bRm))
2957            {
2958                /* register operand */
2959                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2960                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2961                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2962                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
2963                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,1);
2964                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
2965                IEM_MC_LOCAL(uint16_t,      u16Tmp);
2966
                /* The helper works on a local copy; the result is stored into Gv afterwards. */
2967                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2968                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2969                IEM_MC_REF_EFLAGS(pEFlags);
2970                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2971                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2972
2973                IEM_MC_ADVANCE_RIP_AND_FINISH();
2974                IEM_MC_END();
2975            }
2976            else
2977            {
2978                /* memory operand */
2979                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2980                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2981                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2982
2983                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2984                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2985
2986                IEM_MC_LOCAL(uint16_t, u16Tmp);
2987                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2988
2989                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst,    u16Tmp,     0);
2990                IEM_MC_ARG_CONST(uint16_t,      u16Src,     u16Imm,     1);
2991                IEM_MC_ARG(uint32_t *,          pEFlags,                2);
2992                IEM_MC_REF_EFLAGS(pEFlags);
2993                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2994                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2995
2996                IEM_MC_ADVANCE_RIP_AND_FINISH();
2997                IEM_MC_END();
2998            }
2999            break;
3000        }
3001
3002        case IEMMODE_32BIT:
3003        {
3004            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3005            if (IEM_IS_MODRM_REG_MODE(bRm))
3006            {
3007                /* register operand */
3008                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3009                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3010                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3011                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
3012                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,1);
3013                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
3014                IEM_MC_LOCAL(uint32_t,      u32Tmp);
3015
3016                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3017                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3018                IEM_MC_REF_EFLAGS(pEFlags);
3019                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3020                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3021
3022                IEM_MC_ADVANCE_RIP_AND_FINISH();
3023                IEM_MC_END();
3024            }
3025            else
3026            {
3027                /* memory operand */
3028                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3029                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3030                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3031
3032                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3033                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3034
3035                IEM_MC_LOCAL(uint32_t, u32Tmp);
3036                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3037
3038                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst,    u32Tmp,     0);
3039                IEM_MC_ARG_CONST(uint32_t,      u32Src,     u32Imm,     1);
3040                IEM_MC_ARG(uint32_t *,          pEFlags,                2);
3041                IEM_MC_REF_EFLAGS(pEFlags);
3042                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3043                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3044
3045                IEM_MC_ADVANCE_RIP_AND_FINISH();
3046                IEM_MC_END();
3047            }
3048            break;
3049        }
3050
3051        case IEMMODE_64BIT:
3052        {
3053            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3054            if (IEM_IS_MODRM_REG_MODE(bRm))
3055            {
3056                /* register operand */
                /* 64-bit form: Iz is an imm32 that gets sign-extended to 64 bits. */
3057                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3058                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3059                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3060                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
3061                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,1);
3062                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
3063                IEM_MC_LOCAL(uint64_t,      u64Tmp);
3064
3065                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3066                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3067                IEM_MC_REF_EFLAGS(pEFlags);
3068                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3069                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3070
3071                IEM_MC_ADVANCE_RIP_AND_FINISH();
3072                IEM_MC_END();
3073            }
3074            else
3075            {
3076                /* memory operand */
3077                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3078                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3079                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3080
3081                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3082                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */
3083
3084                IEM_MC_LOCAL(uint64_t, u64Tmp);
3085                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3086
                /* The sign-extension to 64 bits is done here instead of at fetch time. */
3087                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst,    u64Tmp,     0);
3088                IEM_MC_ARG_CONST(uint64_t,      u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3089                IEM_MC_ARG(uint32_t *,          pEFlags,                2);
3090                IEM_MC_REF_EFLAGS(pEFlags);
3091                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3092                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3093
3094                IEM_MC_ADVANCE_RIP_AND_FINISH();
3095                IEM_MC_END();
3096            }
3097            break;
3098        }
3099
3100        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3101    }
3102}
3103
3104
3105/**
3106 * @opcode 0x6a
 *
 * push Ib - push a sign-extended byte immediate.  In 64-bit mode the default
 * operand size is 64-bit (IEMOP_HLP_DEFAULT_64BIT_OP_SIZE); the 32-bit case
 * is therefore flagged IEM_MC_F_NOT_64BIT.
3107 */
3108FNIEMOP_DEF(iemOp_push_Ib)
3109{
3110    IEMOP_MNEMONIC(push_Ib, "push Ib");
3111    IEMOP_HLP_MIN_186();
3112    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3113    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3114
    /* The signed immediate is implicitly sign-extended by the PUSH width macros. */
3115    switch (pVCpu->iem.s.enmEffOpSize)
3116    {
3117        case IEMMODE_16BIT:
3118            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
3119            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3120            IEM_MC_PUSH_U16(i8Imm);
3121            IEM_MC_ADVANCE_RIP_AND_FINISH();
3122            IEM_MC_END();
3123            break;
3124        case IEMMODE_32BIT:
3125            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3126            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3127            IEM_MC_PUSH_U32(i8Imm);
3128            IEM_MC_ADVANCE_RIP_AND_FINISH();
3129            IEM_MC_END();
3130            break;
3131        case IEMMODE_64BIT:
3132            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
3133            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3134            IEM_MC_PUSH_U64(i8Imm);
3135            IEM_MC_ADVANCE_RIP_AND_FINISH();
3136            IEM_MC_END();
3137            break;
3138        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3139    }
3140}
3141
3142
3143/**
3144 * @opcode 0x6b
 *
 * imul Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate: Gv = Ev * (sign-extended Ib), truncated to the effective
 * operand size.  SF, ZF, AF and PF are declared undefined below.
3145 */
3146FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3147{
3148    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3149    IEMOP_HLP_MIN_186();
3150    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3151    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3152
3153    switch (pVCpu->iem.s.enmEffOpSize)
3154    {
3155        case IEMMODE_16BIT:
3156        {
3157            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3158            if (IEM_IS_MODRM_REG_MODE(bRm))
3159            {
3160                /* register operand */
3161                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3162                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3163                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3164                IEM_MC_ARG(uint16_t *,      pu16Dst,                    0);
                /* The (int8_t) cast supplies the Ib -> 16-bit sign extension. */
3165                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ (int8_t)u8Imm, 1);
3166                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
3167                IEM_MC_LOCAL(uint16_t,      u16Tmp);
3168
3169                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3170                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
3171                IEM_MC_REF_EFLAGS(pEFlags);
3172                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3173                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3174
3175                IEM_MC_ADVANCE_RIP_AND_FINISH();
3176                IEM_MC_END();
3177            }
3178            else
3179            {
3180                /* memory operand */
3181                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3182
3183                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3184                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3185
3186                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3187                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3188
3189                IEM_MC_LOCAL(uint16_t, u16Tmp);
3190                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3191
3192                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst,    u16Tmp,     0);
3193                IEM_MC_ARG_CONST(uint16_t,      u16Src,     u16Imm,     1);
3194                IEM_MC_ARG(uint32_t *,          pEFlags,                2);
3195                IEM_MC_REF_EFLAGS(pEFlags);
3196                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3197                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3198
3199                IEM_MC_ADVANCE_RIP_AND_FINISH();
3200                IEM_MC_END();
3201            }
3202            break;
3203        }
3204
3205        case IEMMODE_32BIT:
3206        {
3207            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3208            if (IEM_IS_MODRM_REG_MODE(bRm))
3209            {
3210                /* register operand */
3211                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3212                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3213                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3214                IEM_MC_ARG(uint32_t *,      pu32Dst,                    0);
3215                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ (int8_t)u8Imm, 1);
3216                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
3217                IEM_MC_LOCAL(uint32_t,      u32Tmp);
3218
3219                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3220                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
3221                IEM_MC_REF_EFLAGS(pEFlags);
3222                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3223                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3224
3225                IEM_MC_ADVANCE_RIP_AND_FINISH();
3226                IEM_MC_END();
3227            }
3228            else
3229            {
3230                /* memory operand */
3231                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3232                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3233                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3234
3235                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3236                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3237
3238                IEM_MC_LOCAL(uint32_t, u32Tmp);
3239                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3240
3241                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst,    u32Tmp,     0);
3242                IEM_MC_ARG_CONST(uint32_t,      u32Src,     u32Imm,     1);
3243                IEM_MC_ARG(uint32_t *,          pEFlags,                2);
3244                IEM_MC_REF_EFLAGS(pEFlags);
3245                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3246                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3247
3248                IEM_MC_ADVANCE_RIP_AND_FINISH();
3249                IEM_MC_END();
3250            }
3251            break;
3252        }
3253
3254        case IEMMODE_64BIT:
3255        {
3256            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3257            if (IEM_IS_MODRM_REG_MODE(bRm))
3258            {
3259                /* register operand */
3260                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3261                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3262                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3263                IEM_MC_ARG(uint64_t *,      pu64Dst,                    0);
3264                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3265                IEM_MC_ARG(uint32_t *,      pEFlags,                    2);
3266                IEM_MC_LOCAL(uint64_t,      u64Tmp);
3267
3268                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3269                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
3270                IEM_MC_REF_EFLAGS(pEFlags);
3271                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3272                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3273
3274                IEM_MC_ADVANCE_RIP_AND_FINISH();
3275                IEM_MC_END();
3276            }
3277            else
3278            {
3279                /* memory operand */
3280                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3281                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3282                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3283
3284                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3285                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3286
3287                IEM_MC_LOCAL(uint64_t, u64Tmp);
3288                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3289
3290                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst,    u64Tmp,     0);
3291                IEM_MC_ARG_CONST(uint64_t,      u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3292                IEM_MC_ARG(uint32_t *,          pEFlags,                2);
3293                IEM_MC_REF_EFLAGS(pEFlags);
3294                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3295                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3296
3297                IEM_MC_ADVANCE_RIP_AND_FINISH();
3298                IEM_MC_END();
3299            }
3300            break;
3301        }
3302
3303        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3304    }
3305}
3306
3307
3308/**
3309 * @opcode 0x6c
 *
 * ins/rep ins Yb,DX - byte string input from port DX.  Always deferred to a
 * C implementation (I/O + possible VM-exit); the REP variants additionally
 * hint xCX as modified alongside xDI for the native recompiler.
3310 */
3311FNIEMOP_DEF(iemOp_insb_Yb_DX)
3312{
3313    IEMOP_HLP_MIN_186();
3314    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ is treated the same as REPZ for INS. */
3315    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3316    {
3317        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3318        switch (pVCpu->iem.s.enmEffAddrMode)
3319        {
3320            case IEMMODE_16BIT:
3321                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3322                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3323                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3324                                            iemCImpl_rep_ins_op8_addr16, false);
3325            case IEMMODE_32BIT:
3326                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3327                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3328                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3329                                            iemCImpl_rep_ins_op8_addr32, false);
3330            case IEMMODE_64BIT:
3331                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3332                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3333                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3334                                            iemCImpl_rep_ins_op8_addr64, false);
3335                IEM_NOT_REACHED_DEFAULT_CASE_RET();
3336        }
3337    }
3338    else
3339    {
3340        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3341        switch (pVCpu->iem.s.enmEffAddrMode)
3342        {
3343            case IEMMODE_16BIT:
3344                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3345                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3346                                            iemCImpl_ins_op8_addr16, false);
3347            case IEMMODE_32BIT:
3348                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3349                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3350                                            iemCImpl_ins_op8_addr32, false);
3351            case IEMMODE_64BIT:
3352                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3353                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3354                                            iemCImpl_ins_op8_addr64, false);
3355                IEM_NOT_REACHED_DEFAULT_CASE_RET();
3356        }
3357    }
3358}
3359
3360
3361/**
3362 * @opcode 0x6d
 *
 * ins/rep ins Yv,DX - word/dword string input from port DX.  Dispatched by
 * effective operand size (64-bit shares the 32-bit C implementation) and
 * then by effective address size; always deferred to a C implementation.
3363 */
3364FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3365{
3366    IEMOP_HLP_MIN_186();
3367    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3368    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3369    {
3370        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3371        switch (pVCpu->iem.s.enmEffOpSize)
3372        {
3373            case IEMMODE_16BIT:
3374                switch (pVCpu->iem.s.enmEffAddrMode)
3375                {
3376                    case IEMMODE_16BIT:
3377                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3378                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3379                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3380                                                    iemCImpl_rep_ins_op16_addr16, false);
3381                    case IEMMODE_32BIT:
3382                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3383                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3384                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3385                                                    iemCImpl_rep_ins_op16_addr32, false);
3386                    case IEMMODE_64BIT:
3387                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3388                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3389                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3390                                                    iemCImpl_rep_ins_op16_addr64, false);
3391                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3392                }
3393                break;
            /* A 64-bit operand size behaves like 32-bit for INS (32-bit port accesses). */
3394            case IEMMODE_64BIT:
3395            case IEMMODE_32BIT:
3396                switch (pVCpu->iem.s.enmEffAddrMode)
3397                {
3398                    case IEMMODE_16BIT:
3399                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3400                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3401                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3402                                                    iemCImpl_rep_ins_op32_addr16, false);
3403                    case IEMMODE_32BIT:
3404                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3405                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3406                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3407                                                    iemCImpl_rep_ins_op32_addr32, false);
3408                    case IEMMODE_64BIT:
3409                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3410                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3411                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3412                                                    iemCImpl_rep_ins_op32_addr64, false);
3413                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3414                }
3415                break;
3416            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3417        }
3418    }
3419    else
3420    {
3421        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3422        switch (pVCpu->iem.s.enmEffOpSize)
3423        {
3424            case IEMMODE_16BIT:
3425                switch (pVCpu->iem.s.enmEffAddrMode)
3426                {
3427                    case IEMMODE_16BIT:
3428                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3429                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3430                                                    iemCImpl_ins_op16_addr16, false);
3431                    case IEMMODE_32BIT:
3432                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3433                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3434                                                    iemCImpl_ins_op16_addr32, false);
3435                    case IEMMODE_64BIT:
3436                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3437                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3438                                                    iemCImpl_ins_op16_addr64, false);
3439                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3440                }
3441                break;
3442            case IEMMODE_64BIT:
3443            case IEMMODE_32BIT:
3444                switch (pVCpu->iem.s.enmEffAddrMode)
3445                {
3446                    case IEMMODE_16BIT:
3447                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3448                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3449                                                    iemCImpl_ins_op32_addr16, false);
3450                    case IEMMODE_32BIT:
3451                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3452                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3453                                                    iemCImpl_ins_op32_addr32, false);
3454                    case IEMMODE_64BIT:
3455                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3456                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3457                                                    iemCImpl_ins_op32_addr64, false);
3458                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3459                }
3460                break;
3461            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3462        }
3463    }
3464}
3465
3466
3467/**
3468 * @opcode 0x6e
 *
 * outs/rep outs DX,Yb - byte string output to port DX.  Always deferred to a
 * C implementation; the effective source segment is passed along since OUTS
 * reads from DS:xSI (segment-overridable).
3469 */
3470FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3471{
3472    IEMOP_HLP_MIN_186();
3473    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* REPNZ is treated the same as REPZ for OUTS. */
3474    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3475    {
3476        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3477        switch (pVCpu->iem.s.enmEffAddrMode)
3478        {
3479            case IEMMODE_16BIT:
3480                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3481                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3482                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3483                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3484            case IEMMODE_32BIT:
3485                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3486                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3487                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3488                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3489            case IEMMODE_64BIT:
3490                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3491                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3492                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3493                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3494                IEM_NOT_REACHED_DEFAULT_CASE_RET();
3495        }
3496    }
3497    else
3498    {
3499        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3500        switch (pVCpu->iem.s.enmEffAddrMode)
3501        {
3502            case IEMMODE_16BIT:
3503                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3504                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3505                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3506            case IEMMODE_32BIT:
3507                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3508                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3509                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3510            case IEMMODE_64BIT:
3511                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3512                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3513                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3514                IEM_NOT_REACHED_DEFAULT_CASE_RET();
3515        }
3516    }
3517}
3518
3519
3520/**
3521 * @opcode 0x6f
 *
 * outs/rep outs DX,Yv - word/dword string output to port DX.  Dispatched by
 * effective operand size (64-bit shares the 32-bit C implementation) and
 * then by effective address size; always deferred to a C implementation.
 * The effective source segment is passed along (segment-overridable).
3522 */
3523FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3524{
3525    IEMOP_HLP_MIN_186();
3526    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3527    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3528    {
3529        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3530        switch (pVCpu->iem.s.enmEffOpSize)
3531        {
3532            case IEMMODE_16BIT:
3533                switch (pVCpu->iem.s.enmEffAddrMode)
3534                {
3535                    case IEMMODE_16BIT:
3536                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3537                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3538                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3539                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3540                    case IEMMODE_32BIT:
3541                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3542                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3543                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3544                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3545                    case IEMMODE_64BIT:
3546                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3547                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3548                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3549                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3550                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3551                }
3552                break;
            /* A 64-bit operand size behaves like 32-bit for OUTS (32-bit port accesses). */
3553            case IEMMODE_64BIT:
3554            case IEMMODE_32BIT:
3555                switch (pVCpu->iem.s.enmEffAddrMode)
3556                {
3557                    case IEMMODE_16BIT:
3558                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3559                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3560                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3561                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3562                    case IEMMODE_32BIT:
3563                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3564                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3565                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3566                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3567                    case IEMMODE_64BIT:
3568                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3569                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3570                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3571                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3572                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3573                }
3574                break;
3575            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3576        }
3577    }
3578    else
3579    {
3580        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3581        switch (pVCpu->iem.s.enmEffOpSize)
3582        {
3583            case IEMMODE_16BIT:
3584                switch (pVCpu->iem.s.enmEffAddrMode)
3585                {
3586                    case IEMMODE_16BIT:
3587                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3588                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3589                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3590                    case IEMMODE_32BIT:
3591                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3592                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3593                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3594                    case IEMMODE_64BIT:
3595                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3596                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3597                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3598                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3599                }
3600                break;
3601            case IEMMODE_64BIT:
3602            case IEMMODE_32BIT:
3603                switch (pVCpu->iem.s.enmEffAddrMode)
3604                {
3605                    case IEMMODE_16BIT:
3606                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3607                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3608                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3609                    case IEMMODE_32BIT:
3610                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3611                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3612                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3613                    case IEMMODE_64BIT:
3614                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3615                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3616                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3617                        IEM_NOT_REACHED_DEFAULT_CASE_RET();
3618                }
3619                break;
3620            IEM_NOT_REACHED_DEFAULT_CASE_RET();
3621        }
3622    }
3623}
3624
3625
3626/**
3627 * @opcode 0x70
 *
 * jo Jb - short (rel8) jump taken when EFLAGS.OF is set.  Defaults to 64-bit
 * operand size in long mode; Intel ignores the operand size prefix here (see
 * the HLP macro below).
3628 */
3629FNIEMOP_DEF(iemOp_jo_Jb)
3630{
3631    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3632    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3633    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3634
3635    IEM_MC_BEGIN(0, 0, 0, 0);
3636    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3637    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3638        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3639    } IEM_MC_ELSE() {
3640        IEM_MC_ADVANCE_RIP_AND_FINISH();
3641    } IEM_MC_ENDIF();
3642    IEM_MC_END();
3643}
3644
3645
3646/**
3647 * @opcode 0x71
 *
 * jno Jb - short (rel8) jump taken when EFLAGS.OF is clear.  The condition
 * is tested positively (OF set), with the taken/not-taken bodies swapped.
3648 */
3649FNIEMOP_DEF(iemOp_jno_Jb)
3650{
3651    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3652    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3653    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3654
3655    IEM_MC_BEGIN(0, 0, 0, 0);
3656    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3657    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3658        IEM_MC_ADVANCE_RIP_AND_FINISH();
3659    } IEM_MC_ELSE() {
3660        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3661    } IEM_MC_ENDIF();
3662    IEM_MC_END();
3663}
3664
3665/**
3666 * @opcode 0x72
 *
 * jc/jb/jnae Jb - short (rel8) jump taken when EFLAGS.CF is set.
3667 */
3668FNIEMOP_DEF(iemOp_jc_Jb)
3669{
3670    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3671    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3672    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3673
3674    IEM_MC_BEGIN(0, 0, 0, 0);
3675    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3676    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3677        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3678    } IEM_MC_ELSE() {
3679        IEM_MC_ADVANCE_RIP_AND_FINISH();
3680    } IEM_MC_ENDIF();
3681    IEM_MC_END();
3682}
3683
3684
3685/**
3686 * @opcode 0x73
 *
 * jnc/jnb/jae Jb - short (rel8) jump taken when EFLAGS.CF is clear.  The
 * condition is tested positively (CF set), with the bodies swapped.
3687 */
3688FNIEMOP_DEF(iemOp_jnc_Jb)
3689{
3690    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3691    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3692    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3693
3694    IEM_MC_BEGIN(0, 0, 0, 0);
3695    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3696    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3697        IEM_MC_ADVANCE_RIP_AND_FINISH();
3698    } IEM_MC_ELSE() {
3699        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3700    } IEM_MC_ENDIF();
3701    IEM_MC_END();
3702}
3703
3704
3705/**
3706 * @opcode 0x74
 *
 * je/jz Jb - short (rel8) jump taken when EFLAGS.ZF is set.
3707 */
3708FNIEMOP_DEF(iemOp_je_Jb)
3709{
3710    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3711    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3712    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3713
3714    IEM_MC_BEGIN(0, 0, 0, 0);
3715    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3716    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3717        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3718    } IEM_MC_ELSE() {
3719        IEM_MC_ADVANCE_RIP_AND_FINISH();
3720    } IEM_MC_ENDIF();
3721    IEM_MC_END();
3722}
3723
3724
3725/**
3726 * @opcode 0x75
 *
 * jne/jnz Jb - short (rel8) jump taken when EFLAGS.ZF is clear.  The
 * condition is tested positively (ZF set), with the bodies swapped.
3727 */
3728FNIEMOP_DEF(iemOp_jne_Jb)
3729{
3730    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3731    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3732    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3733
3734    IEM_MC_BEGIN(0, 0, 0, 0);
3735    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3736    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3737        IEM_MC_ADVANCE_RIP_AND_FINISH();
3738    } IEM_MC_ELSE() {
3739        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3740    } IEM_MC_ENDIF();
3741    IEM_MC_END();
3742}
3743
3744
3745/**
3746 * @opcode 0x76
 *
 * jbe/jna Jb - short (rel8) jump taken when EFLAGS.CF or EFLAGS.ZF is set
 * (unsigned below-or-equal).
3747 */
3748FNIEMOP_DEF(iemOp_jbe_Jb)
3749{
3750    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3751    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3752    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3753
3754    IEM_MC_BEGIN(0, 0, 0, 0);
3755    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3756    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3757        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3758    } IEM_MC_ELSE() {
3759        IEM_MC_ADVANCE_RIP_AND_FINISH();
3760    } IEM_MC_ENDIF();
3761    IEM_MC_END();
3762}
3763
3764
3765/**
3766 * @opcode 0x77
 *
 * ja/jnbe Jb - short (rel8) jump taken when both EFLAGS.CF and EFLAGS.ZF are
 * clear (unsigned above).  The condition is tested positively (any of CF/ZF
 * set), with the bodies swapped.
3767 */
3768FNIEMOP_DEF(iemOp_jnbe_Jb)
3769{
3770    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3771    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3772    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3773
3774    IEM_MC_BEGIN(0, 0, 0, 0);
3775    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3776    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3777        IEM_MC_ADVANCE_RIP_AND_FINISH();
3778    } IEM_MC_ELSE() {
3779        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3780    } IEM_MC_ENDIF();
3781    IEM_MC_END();
3782}
3783
3784
3785/**
3786 * @opcode 0x78
 *
 * js Jb - short (rel8) jump taken when EFLAGS.SF is set.
3787 */
3788FNIEMOP_DEF(iemOp_js_Jb)
3789{
3790    IEMOP_MNEMONIC(js_Jb, "js Jb");
3791    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3792    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3793
3794    IEM_MC_BEGIN(0, 0, 0, 0);
3795    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3796    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3797        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3798    } IEM_MC_ELSE() {
3799        IEM_MC_ADVANCE_RIP_AND_FINISH();
3800    } IEM_MC_ENDIF();
3801    IEM_MC_END();
3802}
3803
3804
3805/**
3806 * @opcode 0x79
 *
 * jns Jb - short (rel8) jump taken when EFLAGS.SF is clear.  The condition
 * is tested positively (SF set), with the bodies swapped.
3807 */
3808FNIEMOP_DEF(iemOp_jns_Jb)
3809{
3810    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3811    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3812    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3813
3814    IEM_MC_BEGIN(0, 0, 0, 0);
3815    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3816    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3817        IEM_MC_ADVANCE_RIP_AND_FINISH();
3818    } IEM_MC_ELSE() {
3819        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3820    } IEM_MC_ENDIF();
3821    IEM_MC_END();
3822}
3823
3824
3825/**
3826 * @opcode 0x7a
 *
 * jp/jpe Jb - short (rel8) jump taken when EFLAGS.PF is set.
3827 */
3828FNIEMOP_DEF(iemOp_jp_Jb)
3829{
3830    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3831    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3832    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3833
3834    IEM_MC_BEGIN(0, 0, 0, 0);
3835    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3836    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3837        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3838    } IEM_MC_ELSE() {
3839        IEM_MC_ADVANCE_RIP_AND_FINISH();
3840    } IEM_MC_ENDIF();
3841    IEM_MC_END();
3842}
3843
3844
3845/**
3846 * @opcode 0x7b
 *
 * jnp/jpo Jb - short (rel8) jump taken when EFLAGS.PF is clear.  The
 * condition is tested positively (PF set), with the bodies swapped.
3847 */
3848FNIEMOP_DEF(iemOp_jnp_Jb)
3849{
3850    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3851    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3852    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3853
3854    IEM_MC_BEGIN(0, 0, 0, 0);
3855    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3856    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3857        IEM_MC_ADVANCE_RIP_AND_FINISH();
3858    } IEM_MC_ELSE() {
3859        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3860    } IEM_MC_ENDIF();
3861    IEM_MC_END();
3862}
3863
3864
3865/**
3866 * @opcode 0x7c
 *
 * jl/jnge Jb - short (rel8) jump taken when EFLAGS.SF != EFLAGS.OF
 * (signed less).
3867 */
3868FNIEMOP_DEF(iemOp_jl_Jb)
3869{
3870    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3871    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3872    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3873
3874    IEM_MC_BEGIN(0, 0, 0, 0);
3875    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3876    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3877        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3878    } IEM_MC_ELSE() {
3879        IEM_MC_ADVANCE_RIP_AND_FINISH();
3880    } IEM_MC_ENDIF();
3881    IEM_MC_END();
3882}
3883
3884
3885/**
3886 * @opcode 0x7d
 *
 * jnl/jge Jb - short (rel8) jump taken when EFLAGS.SF == EFLAGS.OF (signed
 * greater-or-equal).  The condition is tested positively (SF != OF), with
 * the bodies swapped.
3887 */
3888FNIEMOP_DEF(iemOp_jnl_Jb)
3889{
3890    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3891    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3892    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3893
3894    IEM_MC_BEGIN(0, 0, 0, 0);
3895    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3896    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3897        IEM_MC_ADVANCE_RIP_AND_FINISH();
3898    } IEM_MC_ELSE() {
3899        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3900    } IEM_MC_ENDIF();
3901    IEM_MC_END();
3902}
3903
3904
3905/**
3906 * @opcode 0x7e
 *
 * jle/jng Jb - short (rel8) jump taken when EFLAGS.ZF is set or
 * EFLAGS.SF != EFLAGS.OF (signed less-or-equal).
3907 */
3908FNIEMOP_DEF(iemOp_jle_Jb)
3909{
3910    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3911    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3912    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3913
3914    IEM_MC_BEGIN(0, 0, 0, 0);
3915    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3916    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3917        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3918    } IEM_MC_ELSE() {
3919        IEM_MC_ADVANCE_RIP_AND_FINISH();
3920    } IEM_MC_ENDIF();
3921    IEM_MC_END();
3922}
3923
3924
3925/**
3926 * @opcode 0x7f
 *
 * jg/jnle Jb - short (rel8) jump taken when EFLAGS.ZF is clear and
 * EFLAGS.SF == EFLAGS.OF (signed greater).  The condition is tested
 * positively (ZF set or SF != OF), with the bodies swapped.
3927 */
3928FNIEMOP_DEF(iemOp_jnle_Jb)
3929{
3930    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3931    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3932    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3933
3934    IEM_MC_BEGIN(0, 0, 0, 0);
3935    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3936    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3937        IEM_MC_ADVANCE_RIP_AND_FINISH();
3938    } IEM_MC_ELSE() {
3939        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3940    } IEM_MC_ENDIF();
3941    IEM_MC_END();
3942}
3943
3944
3945/**
3946 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3947 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register path and the non-LOCK memory path (read-modify-write
 * via IEM_MC_MEM_MAP_U8_RW) for an Eb,Ib encoding.  Note: the expansion
 * deliberately ends inside an open 'else' branch; an
 * IEMOP_BODY_BINARY_Eb_Ib_LOCKED invocation must follow immediately to
 * supply the LOCK-prefixed variant and close the braces.
3948 */
3949#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3950    if (IEM_IS_MODRM_REG_MODE(bRm)) \
3951    { \
3952        /* register target */ \
3953        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3954        IEM_MC_BEGIN(3, 0, 0, 0); \
3955        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3956        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
3957        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
3958        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
3959        \
3960        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3961        IEM_MC_REF_EFLAGS(pEFlags); \
3962        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3963        \
3964        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3965        IEM_MC_END(); \
3966    } \
3967    else \
3968    { \
3969        /* memory target */ \
3970        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
3971        { \
3972            IEM_MC_BEGIN(3, 3, 0, 0); \
3973            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
3974            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
3975            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
3976            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
3977            \
3978            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3979            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3980            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
3981            IEMOP_HLP_DONE_DECODING(); \
3982            \
3983            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3984            IEM_MC_FETCH_EFLAGS(EFlags); \
3985            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3986            \
3987            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
3988            IEM_MC_COMMIT_EFLAGS(EFlags); \
3989            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3990            IEM_MC_END(); \
3991        } \
3992        else \
3993        { \
3994            (void)0
3995
/**
 * Companion to IEMOP_BODY_BINARY_Eb_Ib_RW: emits the LOCK-prefixed memory
 * path and closes the braces that IEMOP_BODY_BINARY_Eb_Ib_RW left open.
 * (Separate macro because of IEMAllInstPython.py parsing restrictions.)
 *
 * @param   a_fnLockedU8    The locked (atomic) 8-bit worker,
 *                          e.g. iemAImpl_add_u8_locked.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4019
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for workers that only
 * inspect the destination (CMP): memory is mapped RO and never written back.
 * Leaves the lock-prefix else-branch open; close it with
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK (LOCK is invalid for read-only ops).
 *
 * @param   a_fnNormalU8    The 8-bit worker, e.g. iemAImpl_cmp_u8.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,             0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4066
/**
 * Closes the open else-branch of IEMOP_BODY_BINARY_Eb_Ib_RO by raising \#UD
 * when a LOCK prefix is present (the read-only ops cannot be locked).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4073
4074
4075
4076/**
4077 * @opmaps grp1_80,grp1_83
4078 * @opcode /0
4079 */
4080FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4081{
4082 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4083 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4084 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4085}
4086
4087
4088/**
4089 * @opmaps grp1_80,grp1_83
4090 * @opcode /1
4091 */
4092FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4093{
4094 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4095 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4096 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4097}
4098
4099
4100/**
4101 * @opmaps grp1_80,grp1_83
4102 * @opcode /2
4103 */
4104FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4105{
4106 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4107 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4108 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4109}
4110
4111
4112/**
4113 * @opmaps grp1_80,grp1_83
4114 * @opcode /3
4115 */
4116FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4117{
4118 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4119 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4120 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4121}
4122
4123
4124/**
4125 * @opmaps grp1_80,grp1_83
4126 * @opcode /4
4127 */
4128FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4129{
4130 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4131 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4132 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4133}
4134
4135
4136/**
4137 * @opmaps grp1_80,grp1_83
4138 * @opcode /5
4139 */
4140FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4141{
4142 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4143 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4144 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4145}
4146
4147
4148/**
4149 * @opmaps grp1_80,grp1_83
4150 * @opcode /6
4151 */
4152FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4153{
4154 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4155 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4156 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4157}
4158
4159
4160/**
4161 * @opmaps grp1_80,grp1_83
4162 * @opcode /7
4163 */
4164FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4165{
4166 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4167 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4168 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4169}
4170
4171
4172/**
4173 * @opcode 0x80
4174 */
4175FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4176{
4177 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4178 switch (IEM_GET_MODRM_REG_8(bRm))
4179 {
4180 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4181 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4182 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4183 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4184 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4185 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4186 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4187 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4189 }
4190}
4191
4192
4193/**
4194 * Body for a group 1 binary operator.
4195 */
4196#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4197 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4198 { \
4199 /* register target */ \
4200 switch (pVCpu->iem.s.enmEffOpSize) \
4201 { \
4202 case IEMMODE_16BIT: \
4203 { \
4204 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4205 IEM_MC_BEGIN(3, 0, 0, 0); \
4206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4207 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4208 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4209 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4210 \
4211 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4212 IEM_MC_REF_EFLAGS(pEFlags); \
4213 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4214 \
4215 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4216 IEM_MC_END(); \
4217 break; \
4218 } \
4219 \
4220 case IEMMODE_32BIT: \
4221 { \
4222 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4223 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4225 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4226 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4227 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4228 \
4229 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4230 IEM_MC_REF_EFLAGS(pEFlags); \
4231 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4232 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4233 \
4234 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4235 IEM_MC_END(); \
4236 break; \
4237 } \
4238 \
4239 case IEMMODE_64BIT: \
4240 { \
4241 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4242 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4244 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4245 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4246 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4247 \
4248 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4249 IEM_MC_REF_EFLAGS(pEFlags); \
4250 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4251 \
4252 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4253 IEM_MC_END(); \
4254 break; \
4255 } \
4256 \
4257 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4258 } \
4259 } \
4260 else \
4261 { \
4262 /* memory target */ \
4263 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4264 { \
4265 switch (pVCpu->iem.s.enmEffOpSize) \
4266 { \
4267 case IEMMODE_16BIT: \
4268 { \
4269 IEM_MC_BEGIN(3, 3, 0, 0); \
4270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4272 \
4273 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4274 IEMOP_HLP_DONE_DECODING(); \
4275 \
4276 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4277 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4278 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4279 \
4280 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4281 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4282 IEM_MC_FETCH_EFLAGS(EFlags); \
4283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4284 \
4285 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4286 IEM_MC_COMMIT_EFLAGS(EFlags); \
4287 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4288 IEM_MC_END(); \
4289 break; \
4290 } \
4291 \
4292 case IEMMODE_32BIT: \
4293 { \
4294 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4297 \
4298 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4299 IEMOP_HLP_DONE_DECODING(); \
4300 \
4301 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4302 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4303 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4304 \
4305 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4306 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4307 IEM_MC_FETCH_EFLAGS(EFlags); \
4308 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4309 \
4310 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4311 IEM_MC_COMMIT_EFLAGS(EFlags); \
4312 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4313 IEM_MC_END(); \
4314 break; \
4315 } \
4316 \
4317 case IEMMODE_64BIT: \
4318 { \
4319 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4320 \
4321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4323 \
4324 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4325 IEMOP_HLP_DONE_DECODING(); \
4326 \
4327 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4328 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4329 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4330 \
4331 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4332 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4333 IEM_MC_FETCH_EFLAGS(EFlags); \
4334 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4335 \
4336 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4337 IEM_MC_COMMIT_EFLAGS(EFlags); \
4338 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4339 IEM_MC_END(); \
4340 break; \
4341 } \
4342 \
4343 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4344 } \
4345 } \
4346 else \
4347 { \
4348 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * Companion to IEMOP_BODY_BINARY_Ev_Iz_RW: emits the LOCK-prefixed memory
 * path for all three operand sizes and closes the braces the RW macro left
 * open.
 *
 * @param   a_fnLockedU16   The locked (atomic) 16-bit worker.
 * @param   a_fnLockedU32   The locked (atomic) 32-bit worker.
 * @param   a_fnLockedU64   The locked (atomic) 64-bit worker.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4433
/* read-only version */
/**
 * Read-only variant of IEMOP_BODY_BINARY_Ev_Iz_RW for workers that only
 * inspect the destination (CMP): memory is mapped RO, nothing is written
 * back, and a LOCK prefix raises \#UD.  Unlike the RW/LOCKED pair this macro
 * is self-contained (no open else-branch).
 *
 * @param   a_fnNormalU16   The 16-bit worker.
 * @param   a_fnNormalU32   The 32-bit worker.
 * @param   a_fnNormalU64   The 64-bit worker.
 *
 * @note    Expects bRm (the ModR/M byte) to be in scope at the expansion site.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,           0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, u16Imm,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,           0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, u32Imm,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,           0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, u64Imm,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4590
4591
4592/**
4593 * @opmaps grp1_81
4594 * @opcode /0
4595 */
4596FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4597{
4598 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4599 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4600 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4601}
4602
4603
4604/**
4605 * @opmaps grp1_81
4606 * @opcode /1
4607 */
4608FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4609{
4610 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4611 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4612 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4613}
4614
4615
4616/**
4617 * @opmaps grp1_81
4618 * @opcode /2
4619 */
4620FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4621{
4622 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4623 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4624 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4625}
4626
4627
4628/**
4629 * @opmaps grp1_81
4630 * @opcode /3
4631 */
4632FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4633{
4634 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4635 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4636 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4637}
4638
4639
4640/**
4641 * @opmaps grp1_81
4642 * @opcode /4
4643 */
4644FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4645{
4646 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4647 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4648 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4649}
4650
4651
4652/**
4653 * @opmaps grp1_81
4654 * @opcode /5
4655 */
4656FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4657{
4658 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4659 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4660 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4661}
4662
4663
4664/**
4665 * @opmaps grp1_81
4666 * @opcode /6
4667 */
4668FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4669{
4670 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4671 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4672 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4673}
4674
4675
4676/**
4677 * @opmaps grp1_81
4678 * @opcode /7
4679 */
4680FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4681{
4682 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4683 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4684}
4685
4686
4687/**
4688 * @opcode 0x81
4689 */
4690FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4691{
4692 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4693 switch (IEM_GET_MODRM_REG_8(bRm))
4694 {
4695 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4696 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4697 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4698 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4699 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4700 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4701 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4702 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4703 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4704 }
4705}
4706
4707
4708/**
4709 * @opcode 0x82
4710 * @opmnemonic grp1_82
4711 * @opgroup og_groups
4712 */
4713FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4714{
4715 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4716 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4717}
4718
4719
4720/**
4721 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4722 * iemOp_Grp1_Ev_Ib.
4723 */
4724#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4725 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4726 { \
4727 /* \
4728 * Register target \
4729 */ \
4730 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4731 switch (pVCpu->iem.s.enmEffOpSize) \
4732 { \
4733 case IEMMODE_16BIT: \
4734 IEM_MC_BEGIN(3, 0, 0, 0); \
4735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4736 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4737 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4738 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4739 \
4740 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4741 IEM_MC_REF_EFLAGS(pEFlags); \
4742 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4743 \
4744 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4745 IEM_MC_END(); \
4746 break; \
4747 \
4748 case IEMMODE_32BIT: \
4749 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4751 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4752 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4753 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4754 \
4755 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4756 IEM_MC_REF_EFLAGS(pEFlags); \
4757 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4758 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4759 \
4760 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4761 IEM_MC_END(); \
4762 break; \
4763 \
4764 case IEMMODE_64BIT: \
4765 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4767 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4768 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4769 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4770 \
4771 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4772 IEM_MC_REF_EFLAGS(pEFlags); \
4773 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4774 \
4775 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4776 IEM_MC_END(); \
4777 break; \
4778 \
4779 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4780 } \
4781 } \
4782 else \
4783 { \
4784 /* \
4785 * Memory target. \
4786 */ \
4787 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
4788 { \
4789 switch (pVCpu->iem.s.enmEffOpSize) \
4790 { \
4791 case IEMMODE_16BIT: \
4792 IEM_MC_BEGIN(3, 3, 0, 0); \
4793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4795 \
4796 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4797 IEMOP_HLP_DONE_DECODING(); \
4798 \
4799 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4800 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4801 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4802 \
4803 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4804 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4805 IEM_MC_FETCH_EFLAGS(EFlags); \
4806 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4807 \
4808 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
4809 IEM_MC_COMMIT_EFLAGS(EFlags); \
4810 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4811 IEM_MC_END(); \
4812 break; \
4813 \
4814 case IEMMODE_32BIT: \
4815 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4818 \
4819 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4820 IEMOP_HLP_DONE_DECODING(); \
4821 \
4822 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4823 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4824 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4825 \
4826 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4827 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4828 IEM_MC_FETCH_EFLAGS(EFlags); \
4829 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4830 \
4831 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
4832 IEM_MC_COMMIT_EFLAGS(EFlags); \
4833 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4834 IEM_MC_END(); \
4835 break; \
4836 \
4837 case IEMMODE_64BIT: \
4838 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4839 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4841 \
4842 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4843 IEMOP_HLP_DONE_DECODING(); \
4844 \
4845 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4846 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4847 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4848 \
4849 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4850 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4851 IEM_MC_FETCH_EFLAGS(EFlags); \
4852 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4853 \
4854 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
4855 IEM_MC_COMMIT_EFLAGS(EFlags); \
4856 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4857 IEM_MC_END(); \
4858 break; \
4859 \
4860 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4861 } \
4862 } \
4863 else \
4864 { \
4865 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Companion to IEMOP_BODY_BINARY_Ev_Ib_RW: emits the LOCK-prefixed memory
 * path (sign-extending the byte immediate like the RW macro) and closes the
 * braces the RW macro left open.
 *
 * @param   a_fnLockedU16   The locked (atomic) 16-bit worker.
 * @param   a_fnLockedU32   The locked (atomic) 32-bit worker.
 * @param   a_fnLockedU64   The locked (atomic) 64-bit worker.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t,  u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t,  u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t,  u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4944
/**
 * Read-only variant of the grp1 Ev,Ib instruction body (currently only used
 * by CMP, opcode 0x83 /7): the byte immediate is sign-extended to the
 * effective operand size, the Ev operand is only read, and nothing is
 * written back except EFLAGS.
 *
 * Register operands are handled inline; memory operands are mapped
 * read-only (IEM_MC_MEM_MAP_*_RO) and a LOCK prefix is rejected via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX_RET since there is no write to lock.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5091
/**
 * @opmaps grp1_83
 * @opcode /0
 *
 * ADD Ev,Ib: add a sign-extended byte immediate to a 16/32/64-bit operand.
 * The _RW body handles the plain forms and the _LOCKED body the LOCKed
 * memory form (see the common Ev,Ib body macros earlier in this file).
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5102
5103
/**
 * @opmaps grp1_83
 * @opcode /1
 *
 * OR Ev,Ib: bitwise OR of a sign-extended byte immediate into the operand;
 * plain and LOCKed variants via the common Ev,Ib body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5114
5115
/**
 * @opmaps grp1_83
 * @opcode /2
 *
 * ADC Ev,Ib: add with carry-in of a sign-extended byte immediate;
 * plain and LOCKed variants via the common Ev,Ib body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5126
5127
/**
 * @opmaps grp1_83
 * @opcode /3
 *
 * SBB Ev,Ib: subtract with borrow of a sign-extended byte immediate;
 * plain and LOCKed variants via the common Ev,Ib body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5138
5139
/**
 * @opmaps grp1_83
 * @opcode /4
 *
 * AND Ev,Ib: bitwise AND with a sign-extended byte immediate;
 * plain and LOCKed variants via the common Ev,Ib body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5150
5151
/**
 * @opmaps grp1_83
 * @opcode /5
 *
 * SUB Ev,Ib: subtract a sign-extended byte immediate;
 * plain and LOCKed variants via the common Ev,Ib body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5162
5163
/**
 * @opmaps grp1_83
 * @opcode /6
 *
 * XOR Ev,Ib: bitwise XOR with a sign-extended byte immediate;
 * plain and LOCKed variants via the common Ev,Ib body macros.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5174
5175
/**
 * @opmaps grp1_83
 * @opcode /7
 *
 * CMP Ev,Ib: compare against a sign-extended byte immediate.  Only reads
 * Ev, so the read-only body is used and there is no LOCKed variant (the
 * RO body rejects a LOCK prefix).
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5185
5186
/**
 * @opcode 0x83
 *
 * Group 1 Ev,Ib dispatcher: the ModR/M reg field (bits 5:3) selects which
 * of the eight arithmetic/logical operations to decode.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
             to the 386 even if absent in the intel reference manuals and some
             3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5209
5210
/**
 * @opcode 0x84
 *
 * TEST Eb,Gb: only EFLAGS are written (read-only destination body), and
 * per the _NO_LOCK body a LOCK prefix is not accepted.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5221
5222
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv: word/dword/qword variant; only EFLAGS are written
 * (read-only destination body).
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5232
5233
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb: exchange a byte register with a byte register/memory operand.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: swap via two temporaries. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(2, 4, 0, 0);
        IEM_MC_LOCAL(RTGCPTR,           GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,           bUnmapInfo);
        IEM_MC_LOCAL(uint8_t,           uTmpReg);
        IEM_MC_ARG(uint8_t *,           pu8Mem,           0);
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg,  1);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
        /* Default to the locked worker (XCHG with memory is implicitly
           atomic); only use the unlocked one when IEM_F_X86_DISREGARD_LOCK
           is set in the execution flags. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
        else
            IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Mem, bUnmapInfo);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5287
5288
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv: exchange a 16/32/64-bit general register with a
 * register/memory operand of the same size.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register-register form: swap via two temporaries, one MC block
           per effective operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* The memory operand is mapped read-write and exchanged via the
           locked worker unless IEM_F_X86_DISREGARD_LOCK is set (XCHG with
           memory is implicitly atomic). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 4, 0, 0);
                IEM_MC_LOCAL(RTGCPTR,            GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,            bUnmapInfo);
                IEM_MC_LOCAL(uint16_t,           uTmpReg);
                IEM_MC_ARG(uint16_t *,           pu16Mem,           0);
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg,  1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR,            GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,            bUnmapInfo);
                IEM_MC_LOCAL(uint32_t,           uTmpReg);
                IEM_MC_ARG(uint32_t *,           pu32Mem,           0);
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg,  1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR,            GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,            bUnmapInfo);
                IEM_MC_LOCAL(uint64_t,           uTmpReg);
                IEM_MC_ARG(uint64_t *,           pu64Mem,           0);
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg,  1);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm));
                if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
                else
                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Mem, bUnmapInfo);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5432
5433
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb: store a byte general register to a byte register/memory
 * destination.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5473
5474
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv: store a 16/32/64-bit general register to a register/memory
 * destination of the same size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5571
5572
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb: load a byte general register from a byte register/memory
 * source.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5611
5612
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev: load a 16/32/64-bit general register from a register/memory
 * source of the same size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5709
5710
5711/**
5712 * opcode 0x63
5713 * @todo Table fixme
5714 */
5715FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5716{
5717 if (!IEM_IS_64BIT_CODE(pVCpu))
5718 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5719 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5720 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5721 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5722}
5723
5724
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw: store a segment register selector to a general register or
 * to memory.  Memory stores are always 16-bit; register stores respect
 * the effective operand size (zero-extending for 32/64-bit).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extended to 32 bits */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extended to 64 bits */
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t,  u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5803
5804
5805
5806
/**
 * @opcode 0x8d
 *
 * LEA Gv,M: store the effective address of the memory operand in a general
 * register.  No memory access is performed; the address is truncated to
 * the effective operand size for the 16/32-bit cases.  The register form
 * raises an invalid-opcode exception.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to 16 bits before storing. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to 32 bits before storing. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5856
5857
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev: load a segment register from a 16-bit register/memory operand.
 * CS cannot be loaded this way; the actual load goes through the
 * iemCImpl_load_SReg C implementation.  Loading SS additionally sets
 * IEM_CIMPL_F_INHIBIT_SHADOW (interrupt inhibition for the next
 * instruction), and loads that can change addressing semantics in 32-bit
 * code also pass IEM_CIMPL_F_MODE.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
                IEM_MC_ARG(uint16_t,      u16Value,          1); \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                      RT_BIT_64(kIemNativeGstReg_SegSelFirst   + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                                    iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
                IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
                IEM_MC_ARG(uint16_t,      u16Value,          1); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                      RT_BIT_64(kIemNativeGstReg_SegSelFirst   + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + iSegReg) \
                                    | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                                    iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5971
5972
/** Opcode 0x8f /0. - pop Ev (register or memory word/dword/qword pop). */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                           1);
            /* Third arg: low byte = no immediate bytes follow; high byte = +2
               SP bias applied during the EA calc (RSP is popped first, see above). */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,    pVCpu->iem.s.iEffSeg,      0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                           1);
            /* +4 SP bias for the dword pop during EA calculation. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,    pVCpu->iem.s.iEffSeg,      0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                           1);
            /* +8 SP bias for the qword pop during EA calculation. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,    pVCpu->iem.s.iEffSeg,      0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary RSP copy so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the updated RSP only after the store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6104
6105
6106/**
6107 * @opcode 0x8f
6108 */
6109FNIEMOP_DEF(iemOp_Grp1A__xop)
6110{
6111 /*
6112 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6113 * three byte VEX prefix, except that the mmmmm field cannot have the values
6114 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6115 */
6116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6117 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6118 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6119
6120 IEMOP_MNEMONIC(xop, "xop");
6121 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6122 {
6123 /** @todo Test when exctly the XOP conformance checks kick in during
6124 * instruction decoding and fetching (using \#PF). */
6125 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6126 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6127 if ( ( pVCpu->iem.s.fPrefixes
6128 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6129 == 0)
6130 {
6131 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6132 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6133 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6134 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6135 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6136 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6137 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6138 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6139 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6140
6141 /** @todo XOP: Just use new tables and decoders. */
6142 switch (bRm & 0x1f)
6143 {
6144 case 8: /* xop opcode map 8. */
6145 IEMOP_BITCH_ABOUT_STUB();
6146 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6147
6148 case 9: /* xop opcode map 9. */
6149 IEMOP_BITCH_ABOUT_STUB();
6150 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6151
6152 case 10: /* xop opcode map 10. */
6153 IEMOP_BITCH_ABOUT_STUB();
6154 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6155
6156 default:
6157 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6158 IEMOP_RAISE_INVALID_OPCODE_RET();
6159 }
6160 }
6161 else
6162 Log(("XOP: Invalid prefix mix!\n"));
6163 }
6164 else
6165 Log(("XOP: XOP support disabled!\n"));
6166 IEMOP_RAISE_INVALID_OPCODE_RET();
6167}
6168
6169
6170/**
6171 * Common 'xchg reg,rAX' helper.
6172 */
6173FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6174{
6175 iReg |= pVCpu->iem.s.uRexB;
6176 switch (pVCpu->iem.s.enmEffOpSize)
6177 {
6178 case IEMMODE_16BIT:
6179 IEM_MC_BEGIN(0, 2, 0, 0);
6180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6181 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6182 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6183 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6184 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6185 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6186 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6187 IEM_MC_ADVANCE_RIP_AND_FINISH();
6188 IEM_MC_END();
6189 break;
6190
6191 case IEMMODE_32BIT:
6192 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6194 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6195 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6196 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6197 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6198 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6199 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6200 IEM_MC_ADVANCE_RIP_AND_FINISH();
6201 IEM_MC_END();
6202 break;
6203
6204 case IEMMODE_64BIT:
6205 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6207 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6208 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6209 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6210 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6211 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6212 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6213 IEM_MC_ADVANCE_RIP_AND_FINISH();
6214 IEM_MC_END();
6215 break;
6216
6217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6218 }
6219}
6220
6221
6222/**
6223 * @opcode 0x90
6224 */
6225FNIEMOP_DEF(iemOp_nop)
6226{
6227 /* R8/R8D and RAX/EAX can be exchanged. */
6228 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6229 {
6230 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6231 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6232 }
6233
6234 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6235 {
6236 IEMOP_MNEMONIC(pause, "pause");
6237 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6238 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6239 if (!IEM_IS_IN_GUEST(pVCpu))
6240 { /* probable */ }
6241#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6242 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6243 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6244#endif
6245#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6246 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6247 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6248#endif
6249 }
6250 else
6251 IEMOP_MNEMONIC(nop, "nop");
6252 /** @todo testcase: lock nop; lock pause */
6253 IEM_MC_BEGIN(0, 0, 0, 0);
6254 IEMOP_HLP_DONE_DECODING();
6255 IEM_MC_ADVANCE_RIP_AND_FINISH();
6256 IEM_MC_END();
6257}
6258
6259
6260/**
6261 * @opcode 0x91
6262 */
6263FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6264{
6265 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6266 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6267}
6268
6269
6270/**
6271 * @opcode 0x92
6272 */
6273FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6274{
6275 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6276 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6277}
6278
6279
6280/**
6281 * @opcode 0x93
6282 */
6283FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6284{
6285 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6286 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6287}
6288
6289
6290/**
6291 * @opcode 0x94
6292 */
6293FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6294{
6295 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6296 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6297}
6298
6299
6300/**
6301 * @opcode 0x95
6302 */
6303FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6304{
6305 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6306 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6307}
6308
6309
6310/**
6311 * @opcode 0x96
6312 */
6313FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6314{
6315 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6316 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6317}
6318
6319
6320/**
6321 * @opcode 0x97
6322 */
6323FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6324{
6325 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6326 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6327}
6328
6329
6330/**
6331 * @opcode 0x98
6332 */
6333FNIEMOP_DEF(iemOp_cbw)
6334{
6335 switch (pVCpu->iem.s.enmEffOpSize)
6336 {
6337 case IEMMODE_16BIT:
6338 IEMOP_MNEMONIC(cbw, "cbw");
6339 IEM_MC_BEGIN(0, 1, 0, 0);
6340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6341 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6342 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6343 } IEM_MC_ELSE() {
6344 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6345 } IEM_MC_ENDIF();
6346 IEM_MC_ADVANCE_RIP_AND_FINISH();
6347 IEM_MC_END();
6348 break;
6349
6350 case IEMMODE_32BIT:
6351 IEMOP_MNEMONIC(cwde, "cwde");
6352 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6354 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6355 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6356 } IEM_MC_ELSE() {
6357 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6358 } IEM_MC_ENDIF();
6359 IEM_MC_ADVANCE_RIP_AND_FINISH();
6360 IEM_MC_END();
6361 break;
6362
6363 case IEMMODE_64BIT:
6364 IEMOP_MNEMONIC(cdqe, "cdqe");
6365 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6367 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6368 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6369 } IEM_MC_ELSE() {
6370 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6371 } IEM_MC_ENDIF();
6372 IEM_MC_ADVANCE_RIP_AND_FINISH();
6373 IEM_MC_END();
6374 break;
6375
6376 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6377 }
6378}
6379
6380
6381/**
6382 * @opcode 0x99
6383 */
6384FNIEMOP_DEF(iemOp_cwd)
6385{
6386 switch (pVCpu->iem.s.enmEffOpSize)
6387 {
6388 case IEMMODE_16BIT:
6389 IEMOP_MNEMONIC(cwd, "cwd");
6390 IEM_MC_BEGIN(0, 1, 0, 0);
6391 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6392 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6393 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6394 } IEM_MC_ELSE() {
6395 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6396 } IEM_MC_ENDIF();
6397 IEM_MC_ADVANCE_RIP_AND_FINISH();
6398 IEM_MC_END();
6399 break;
6400
6401 case IEMMODE_32BIT:
6402 IEMOP_MNEMONIC(cdq, "cdq");
6403 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6405 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6406 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6407 } IEM_MC_ELSE() {
6408 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6409 } IEM_MC_ENDIF();
6410 IEM_MC_ADVANCE_RIP_AND_FINISH();
6411 IEM_MC_END();
6412 break;
6413
6414 case IEMMODE_64BIT:
6415 IEMOP_MNEMONIC(cqo, "cqo");
6416 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6418 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6419 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6420 } IEM_MC_ELSE() {
6421 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6422 } IEM_MC_ENDIF();
6423 IEM_MC_ADVANCE_RIP_AND_FINISH();
6424 IEM_MC_END();
6425 break;
6426
6427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6428 }
6429}
6430
6431
6432/**
6433 * @opcode 0x9a
6434 */
6435FNIEMOP_DEF(iemOp_call_Ap)
6436{
6437 IEMOP_MNEMONIC(call_Ap, "call Ap");
6438 IEMOP_HLP_NO_64BIT();
6439
6440 /* Decode the far pointer address and pass it on to the far call C implementation. */
6441 uint32_t off32Seg;
6442 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6443 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6444 else
6445 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6446 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
6449 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
6450 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6451 /** @todo make task-switches, ring-switches, ++ return non-zero status */
6452}
6453
6454
/** Opcode 0x9b. (aka fwait)
 * Checks for pending FPU exceptions / device-not-available before continuing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6466
6467
6468/**
6469 * @opcode 0x9c
6470 */
6471FNIEMOP_DEF(iemOp_pushf_Fv)
6472{
6473 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6475 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6476 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6477 iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6478}
6479
6480
6481/**
6482 * @opcode 0x9d
6483 */
6484FNIEMOP_DEF(iemOp_popf_Fv)
6485{
6486 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6488 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6489 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6490 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6491 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6492}
6493
6494
6495/**
6496 * @opcode 0x9e
6497 */
6498FNIEMOP_DEF(iemOp_sahf)
6499{
6500 IEMOP_MNEMONIC(sahf, "sahf");
6501 if ( IEM_IS_64BIT_CODE(pVCpu)
6502 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6503 IEMOP_RAISE_INVALID_OPCODE_RET();
6504 IEM_MC_BEGIN(0, 2, 0, 0);
6505 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6506 IEM_MC_LOCAL(uint32_t, u32Flags);
6507 IEM_MC_LOCAL(uint32_t, EFlags);
6508 IEM_MC_FETCH_EFLAGS(EFlags);
6509 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6510 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6511 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6512 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6513 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6514 IEM_MC_COMMIT_EFLAGS(EFlags);
6515 IEM_MC_ADVANCE_RIP_AND_FINISH();
6516 IEM_MC_END();
6517}
6518
6519
6520/**
6521 * @opcode 0x9f
6522 */
6523FNIEMOP_DEF(iemOp_lahf)
6524{
6525 IEMOP_MNEMONIC(lahf, "lahf");
6526 if ( IEM_IS_64BIT_CODE(pVCpu)
6527 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6528 IEMOP_RAISE_INVALID_OPCODE_RET();
6529 IEM_MC_BEGIN(0, 1, 0, 0);
6530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6531 IEM_MC_LOCAL(uint8_t, u8Flags);
6532 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6533 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6534 IEM_MC_ADVANCE_RIP_AND_FINISH();
6535 IEM_MC_END();
6536}
6537
6538
6539/**
6540 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6541 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6542 * Will return/throw on failures.
6543 * @param a_GCPtrMemOff The variable to store the offset in.
6544 */
6545#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6546 do \
6547 { \
6548 switch (pVCpu->iem.s.enmEffAddrMode) \
6549 { \
6550 case IEMMODE_16BIT: \
6551 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6552 break; \
6553 case IEMMODE_32BIT: \
6554 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6555 break; \
6556 case IEMMODE_64BIT: \
6557 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6558 break; \
6559 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6560 } \
6561 } while (0)
6562
6563/**
6564 * @opcode 0xa0
6565 */
6566FNIEMOP_DEF(iemOp_mov_AL_Ob)
6567{
6568 /*
6569 * Get the offset.
6570 */
6571 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6572 RTGCPTR GCPtrMemOffDecode;
6573 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6574
6575 /*
6576 * Fetch AL.
6577 */
6578 IEM_MC_BEGIN(0, 2, 0, 0);
6579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6580 IEM_MC_LOCAL(uint8_t, u8Tmp);
6581 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6582 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6583 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6584 IEM_MC_ADVANCE_RIP_AND_FINISH();
6585 IEM_MC_END();
6586}
6587
6588
6589/**
6590 * @opcode 0xa1
6591 */
6592FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6593{
6594 /*
6595 * Get the offset.
6596 */
6597 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6598 RTGCPTR GCPtrMemOffDecode;
6599 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6600
6601 /*
6602 * Fetch rAX.
6603 */
6604 switch (pVCpu->iem.s.enmEffOpSize)
6605 {
6606 case IEMMODE_16BIT:
6607 IEM_MC_BEGIN(0, 2, 0, 0);
6608 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6609 IEM_MC_LOCAL(uint16_t, u16Tmp);
6610 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6611 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6612 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6613 IEM_MC_ADVANCE_RIP_AND_FINISH();
6614 IEM_MC_END();
6615 break;
6616
6617 case IEMMODE_32BIT:
6618 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6620 IEM_MC_LOCAL(uint32_t, u32Tmp);
6621 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6622 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6623 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6624 IEM_MC_ADVANCE_RIP_AND_FINISH();
6625 IEM_MC_END();
6626 break;
6627
6628 case IEMMODE_64BIT:
6629 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6631 IEM_MC_LOCAL(uint64_t, u64Tmp);
6632 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6633 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6634 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6635 IEM_MC_ADVANCE_RIP_AND_FINISH();
6636 IEM_MC_END();
6637 break;
6638
6639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6640 }
6641}
6642
6643
6644/**
6645 * @opcode 0xa2
6646 */
6647FNIEMOP_DEF(iemOp_mov_Ob_AL)
6648{
6649 /*
6650 * Get the offset.
6651 */
6652 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6653 RTGCPTR GCPtrMemOffDecode;
6654 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6655
6656 /*
6657 * Store AL.
6658 */
6659 IEM_MC_BEGIN(0, 2, 0, 0);
6660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6661 IEM_MC_LOCAL(uint8_t, u8Tmp);
6662 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6663 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6664 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6665 IEM_MC_ADVANCE_RIP_AND_FINISH();
6666 IEM_MC_END();
6667}
6668
6669
6670/**
6671 * @opcode 0xa3
6672 */
6673FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6674{
6675 /*
6676 * Get the offset.
6677 */
6678 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6679 RTGCPTR GCPtrMemOffDecode;
6680 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6681
6682 /*
6683 * Store rAX.
6684 */
6685 switch (pVCpu->iem.s.enmEffOpSize)
6686 {
6687 case IEMMODE_16BIT:
6688 IEM_MC_BEGIN(0, 2, 0, 0);
6689 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6690 IEM_MC_LOCAL(uint16_t, u16Tmp);
6691 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6692 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6693 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6694 IEM_MC_ADVANCE_RIP_AND_FINISH();
6695 IEM_MC_END();
6696 break;
6697
6698 case IEMMODE_32BIT:
6699 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6701 IEM_MC_LOCAL(uint32_t, u32Tmp);
6702 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6703 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6704 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6705 IEM_MC_ADVANCE_RIP_AND_FINISH();
6706 IEM_MC_END();
6707 break;
6708
6709 case IEMMODE_64BIT:
6710 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6712 IEM_MC_LOCAL(uint64_t, u64Tmp);
6713 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6714 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6715 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6716 IEM_MC_ADVANCE_RIP_AND_FINISH();
6717 IEM_MC_END();
6718 break;
6719
6720 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6721 }
6722}
6723
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-REP MOVS step: load ValBits bits from DS(or override):rSI,
 * store to ES:rDI, then advance (or retreat, if EFLAGS.DF is set) both index
 * registers by the operand byte count at the given address width. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6743
6744/**
6745 * @opcode 0xa4
6746 */
6747FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6748{
6749 /*
6750 * Use the C implementation if a repeat prefix is encountered.
6751 */
6752 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6753 {
6754 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6756 switch (pVCpu->iem.s.enmEffAddrMode)
6757 {
6758 case IEMMODE_16BIT:
6759 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6760 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6761 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6762 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6763 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6764 case IEMMODE_32BIT:
6765 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6766 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6767 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6768 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6769 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6770 case IEMMODE_64BIT:
6771 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6772 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6773 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6774 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6775 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6777 }
6778 }
6779
6780 /*
6781 * Sharing case implementation with movs[wdq] below.
6782 */
6783 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6784 switch (pVCpu->iem.s.enmEffAddrMode)
6785 {
6786 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6787 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6788 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6789 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6790 }
6791}
6792
6793
6794/**
6795 * @opcode 0xa5
6796 */
6797FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6798{
6799
6800 /*
6801 * Use the C implementation if a repeat prefix is encountered.
6802 */
6803 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6804 {
6805 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6807 switch (pVCpu->iem.s.enmEffOpSize)
6808 {
6809 case IEMMODE_16BIT:
6810 switch (pVCpu->iem.s.enmEffAddrMode)
6811 {
6812 case IEMMODE_16BIT:
6813 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6814 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6815 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6816 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6817 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6818 case IEMMODE_32BIT:
6819 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6820 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6821 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6822 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6823 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6824 case IEMMODE_64BIT:
6825 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6826 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6827 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6828 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6829 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6831 }
6832 break;
6833 case IEMMODE_32BIT:
6834 switch (pVCpu->iem.s.enmEffAddrMode)
6835 {
6836 case IEMMODE_16BIT:
6837 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6840 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6841 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6842 case IEMMODE_32BIT:
6843 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6844 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6845 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6846 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6847 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6848 case IEMMODE_64BIT:
6849 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6850 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6851 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6852 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6853 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6854 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6855 }
6856 case IEMMODE_64BIT:
6857 switch (pVCpu->iem.s.enmEffAddrMode)
6858 {
6859 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6860 case IEMMODE_32BIT:
6861 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6862 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6863 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6864 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6865 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6866 case IEMMODE_64BIT:
6867 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6868 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6869 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6870 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6871 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6873 }
6874 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6875 }
6876 }
6877
6878 /*
6879 * Annoying double switch here.
6880 * Using ugly macro for implementing the cases, sharing it with movsb.
6881 */
6882 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6883 switch (pVCpu->iem.s.enmEffOpSize)
6884 {
6885 case IEMMODE_16BIT:
6886 switch (pVCpu->iem.s.enmEffAddrMode)
6887 {
6888 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6889 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6890 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6892 }
6893 break;
6894
6895 case IEMMODE_32BIT:
6896 switch (pVCpu->iem.s.enmEffAddrMode)
6897 {
6898 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6899 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6900 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6901 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6902 }
6903 break;
6904
6905 case IEMMODE_64BIT:
6906 switch (pVCpu->iem.s.enmEffAddrMode)
6907 {
6908 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6909 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6910 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6912 }
6913 break;
6914 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6915 }
6916}
6917
6918#undef IEM_MOVS_CASE
6919
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-REP CMPS step: load ValBits bits from DS(or override):rSI and
 * ES:rDI, run the compare assembly helper to set EFLAGS, then advance (or
 * retreat, if EFLAGS.DF is set) both index registers by the operand byte
 * count at the given address width. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6947
/**
 * @opcode 0xa6
 *
 * CMPSB - compare the byte at [iEffSeg:xSI] with the byte at [ES:xDI] and
 * step both index registers.  REPE/REPNE forms are deferred to the C
 * implementation; the plain form shares IEM_CMPS_CASE with cmps[wdq].
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Guest shadow flush mask: the helper steps xSI and xDI and counts
           down xCX; EFLAGS changes are covered by IEM_CIMPL_F_STATUS_FLAGS. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7024
7025
7026/**
7027 * @opcode 0xa7
7028 */
7029FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7030{
7031 /*
7032 * Use the C implementation if a repeat prefix is encountered.
7033 */
7034 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7035 {
7036 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7038 switch (pVCpu->iem.s.enmEffOpSize)
7039 {
7040 case IEMMODE_16BIT:
7041 switch (pVCpu->iem.s.enmEffAddrMode)
7042 {
7043 case IEMMODE_16BIT:
7044 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7045 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7046 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7047 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7048 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7049 case IEMMODE_32BIT:
7050 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7051 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7052 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7053 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7054 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7055 case IEMMODE_64BIT:
7056 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7057 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7058 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7059 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7060 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7061 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7062 }
7063 break;
7064 case IEMMODE_32BIT:
7065 switch (pVCpu->iem.s.enmEffAddrMode)
7066 {
7067 case IEMMODE_16BIT:
7068 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7069 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7070 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7071 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7072 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7073 case IEMMODE_32BIT:
7074 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7075 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7076 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7077 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7078 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7079 case IEMMODE_64BIT:
7080 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7081 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7082 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7083 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7084 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7085 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7086 }
7087 case IEMMODE_64BIT:
7088 switch (pVCpu->iem.s.enmEffAddrMode)
7089 {
7090 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7091 case IEMMODE_32BIT:
7092 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7093 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7094 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7095 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7096 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7097 case IEMMODE_64BIT:
7098 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7099 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7100 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7101 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7102 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7103 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7104 }
7105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7106 }
7107 }
7108
7109 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7110 {
7111 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7113 switch (pVCpu->iem.s.enmEffOpSize)
7114 {
7115 case IEMMODE_16BIT:
7116 switch (pVCpu->iem.s.enmEffAddrMode)
7117 {
7118 case IEMMODE_16BIT:
7119 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7120 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7121 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7122 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7123 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7124 case IEMMODE_32BIT:
7125 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7126 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7127 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7128 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7129 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7130 case IEMMODE_64BIT:
7131 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7132 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7133 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7134 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7135 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7137 }
7138 break;
7139 case IEMMODE_32BIT:
7140 switch (pVCpu->iem.s.enmEffAddrMode)
7141 {
7142 case IEMMODE_16BIT:
7143 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7144 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7145 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7146 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7147 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7148 case IEMMODE_32BIT:
7149 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7150 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7151 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7152 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7153 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7154 case IEMMODE_64BIT:
7155 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7156 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7157 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7158 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7159 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7161 }
7162 case IEMMODE_64BIT:
7163 switch (pVCpu->iem.s.enmEffAddrMode)
7164 {
7165 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7166 case IEMMODE_32BIT:
7167 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7168 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7169 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7170 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7171 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7172 case IEMMODE_64BIT:
7173 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7174 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7175 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7176 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7177 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7179 }
7180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7181 }
7182 }
7183
7184 /*
7185 * Annoying double switch here.
7186 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7187 */
7188 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7189 switch (pVCpu->iem.s.enmEffOpSize)
7190 {
7191 case IEMMODE_16BIT:
7192 switch (pVCpu->iem.s.enmEffAddrMode)
7193 {
7194 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7195 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7196 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7198 }
7199 break;
7200
7201 case IEMMODE_32BIT:
7202 switch (pVCpu->iem.s.enmEffAddrMode)
7203 {
7204 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7205 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7206 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7208 }
7209 break;
7210
7211 case IEMMODE_64BIT:
7212 switch (pVCpu->iem.s.enmEffAddrMode)
7213 {
7214 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7215 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7216 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7218 }
7219 break;
7220 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7221 }
7222}
7223
7224#undef IEM_CMPS_CASE
7225
/**
 * @opcode 0xa8
 *
 * TEST AL,Ib - ANDs AL with the immediate byte, updating the flags only
 * (the result is discarded).
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
7235
7236
/**
 * @opcode 0xa9
 *
 * TEST rAX,Iz - ANDs AX/EAX/RAX with the immediate (size per effective
 * operand size), updating the flags only (the result is discarded).
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
7246
7247
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-repeating STOS body for one operand-size/address-size
 * combination: stores the low ValBits of xAX to [ES:xDI], then steps xDI by
 * ValBits/8 - backwards when EFL.DF is set, forwards otherwise.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags passed to IEM_MC_BEGIN.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7264
/**
 * @opcode 0xaa
 *
 * STOSB - store AL to [ES:xDI] and step xDI.  The REP form is deferred to
 * the C implementation; the plain form shares IEM_STOS_CASE with stos[wdq].
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Guest shadow flush mask: the helper steps xDI and counts down xCX. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7310
7311
7312/**
7313 * @opcode 0xab
7314 */
7315FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7316{
7317 /*
7318 * Use the C implementation if a repeat prefix is encountered.
7319 */
7320 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7321 {
7322 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7324 switch (pVCpu->iem.s.enmEffOpSize)
7325 {
7326 case IEMMODE_16BIT:
7327 switch (pVCpu->iem.s.enmEffAddrMode)
7328 {
7329 case IEMMODE_16BIT:
7330 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7331 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7332 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7333 iemCImpl_stos_ax_m16);
7334 case IEMMODE_32BIT:
7335 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7336 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7337 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7338 iemCImpl_stos_ax_m32);
7339 case IEMMODE_64BIT:
7340 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7341 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7342 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7343 iemCImpl_stos_ax_m64);
7344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7345 }
7346 break;
7347 case IEMMODE_32BIT:
7348 switch (pVCpu->iem.s.enmEffAddrMode)
7349 {
7350 case IEMMODE_16BIT:
7351 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7352 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7353 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7354 iemCImpl_stos_eax_m16);
7355 case IEMMODE_32BIT:
7356 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7357 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7358 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7359 iemCImpl_stos_eax_m32);
7360 case IEMMODE_64BIT:
7361 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7362 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7363 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7364 iemCImpl_stos_eax_m64);
7365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7366 }
7367 case IEMMODE_64BIT:
7368 switch (pVCpu->iem.s.enmEffAddrMode)
7369 {
7370 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7371 case IEMMODE_32BIT:
7372 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7373 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7374 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7375 iemCImpl_stos_rax_m32);
7376 case IEMMODE_64BIT:
7377 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7378 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7379 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7380 iemCImpl_stos_rax_m64);
7381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7382 }
7383 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7384 }
7385 }
7386
7387 /*
7388 * Annoying double switch here.
7389 * Using ugly macro for implementing the cases, sharing it with stosb.
7390 */
7391 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7392 switch (pVCpu->iem.s.enmEffOpSize)
7393 {
7394 case IEMMODE_16BIT:
7395 switch (pVCpu->iem.s.enmEffAddrMode)
7396 {
7397 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7398 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7399 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7401 }
7402 break;
7403
7404 case IEMMODE_32BIT:
7405 switch (pVCpu->iem.s.enmEffAddrMode)
7406 {
7407 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7408 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7409 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7410 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7411 }
7412 break;
7413
7414 case IEMMODE_64BIT:
7415 switch (pVCpu->iem.s.enmEffAddrMode)
7416 {
7417 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7418 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7419 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7420 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7421 }
7422 break;
7423 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7424 }
7425}
7426
7427#undef IEM_STOS_CASE
7428
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-repeating LODS body for one operand-size/address-size
 * combination: loads the value at [iEffSeg:xSI] into the low ValBits of
 * xAX, then steps xSI by ValBits/8 - backwards when EFL.DF is set,
 * forwards otherwise.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags passed to IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7445
/**
 * @opcode 0xac
 *
 * LODSB - load AL from [iEffSeg:xSI] and step xSI.  The REP form is
 * deferred to the C implementation; the plain form shares IEM_LODS_CASE
 * with lods[wdq].
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Guest shadow flush mask: the helper writes xAX, steps xSI and
           counts down xCX. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7494
7495
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - load AX/EAX/RAX from [iEffSeg:xSI] and step xSI.  The
 * REP form is deferred to the C implementation; the plain form shares
 * IEM_LODS_CASE with lodsb.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Guest shadow flush mask: the helper writes xAX, steps xSI and
           counts down xCX. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7); /* 16-bit addressing not encodable with 64-bit operand size. */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7618
7619#undef IEM_LODS_CASE
7620
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeating SCAS body for one operand-size/address-size
 * combination: compares the low ValBits of xAX with the value at [ES:xDI]
 * (updating EFLAGS only), then steps xDI by ValBits/8 - backwards when
 * EFL.DF is set, forwards otherwise.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags passed to IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7643
7644/**
7645 * @opcode 0xae
7646 */
7647FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7648{
7649 /*
7650 * Use the C implementation if a repeat prefix is encountered.
7651 */
7652 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7653 {
7654 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7656 switch (pVCpu->iem.s.enmEffAddrMode)
7657 {
7658 case IEMMODE_16BIT:
7659 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7660 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7661 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7662 iemCImpl_repe_scas_al_m16);
7663 case IEMMODE_32BIT:
7664 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7665 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7666 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7667 iemCImpl_repe_scas_al_m32);
7668 case IEMMODE_64BIT:
7669 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7670 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7671 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7672 iemCImpl_repe_scas_al_m64);
7673 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7674 }
7675 }
7676 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7677 {
7678 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7680 switch (pVCpu->iem.s.enmEffAddrMode)
7681 {
7682 case IEMMODE_16BIT:
7683 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7684 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7685 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7686 iemCImpl_repne_scas_al_m16);
7687 case IEMMODE_32BIT:
7688 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7689 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7690 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7691 iemCImpl_repne_scas_al_m32);
7692 case IEMMODE_64BIT:
7693 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7694 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7695 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7696 iemCImpl_repne_scas_al_m64);
7697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7698 }
7699 }
7700
7701 /*
7702 * Sharing case implementation with stos[wdq] below.
7703 */
7704 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7705 switch (pVCpu->iem.s.enmEffAddrMode)
7706 {
7707 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7708 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7709 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7711 }
7712}
7713
7714
7715/**
7716 * @opcode 0xaf
7717 */
7718FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7719{
7720 /*
7721 * Use the C implementation if a repeat prefix is encountered.
7722 */
7723 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7724 {
7725 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7727 switch (pVCpu->iem.s.enmEffOpSize)
7728 {
7729 case IEMMODE_16BIT:
7730 switch (pVCpu->iem.s.enmEffAddrMode)
7731 {
7732 case IEMMODE_16BIT:
7733 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7734 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7735 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7736 iemCImpl_repe_scas_ax_m16);
7737 case IEMMODE_32BIT:
7738 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7739 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7740 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7741 iemCImpl_repe_scas_ax_m32);
7742 case IEMMODE_64BIT:
7743 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7744 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7745 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7746 iemCImpl_repe_scas_ax_m64);
7747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7748 }
7749 break;
7750 case IEMMODE_32BIT:
7751 switch (pVCpu->iem.s.enmEffAddrMode)
7752 {
7753 case IEMMODE_16BIT:
7754 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7755 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7756 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7757 iemCImpl_repe_scas_eax_m16);
7758 case IEMMODE_32BIT:
7759 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7760 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7761 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7762 iemCImpl_repe_scas_eax_m32);
7763 case IEMMODE_64BIT:
7764 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7765 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7766 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7767 iemCImpl_repe_scas_eax_m64);
7768 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7769 }
7770 case IEMMODE_64BIT:
7771 switch (pVCpu->iem.s.enmEffAddrMode)
7772 {
7773 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7774 case IEMMODE_32BIT:
7775 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7776 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7777 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7778 iemCImpl_repe_scas_rax_m32);
7779 case IEMMODE_64BIT:
7780 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7781 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7782 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7783 iemCImpl_repe_scas_rax_m64);
7784 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7785 }
7786 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7787 }
7788 }
7789 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7790 {
7791 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7792 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7793 switch (pVCpu->iem.s.enmEffOpSize)
7794 {
7795 case IEMMODE_16BIT:
7796 switch (pVCpu->iem.s.enmEffAddrMode)
7797 {
7798 case IEMMODE_16BIT:
7799 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7800 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7801 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7802 iemCImpl_repne_scas_ax_m16);
7803 case IEMMODE_32BIT:
7804 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7805 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7806 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7807 iemCImpl_repne_scas_ax_m32);
7808 case IEMMODE_64BIT:
7809 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7810 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7811 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7812 iemCImpl_repne_scas_ax_m64);
7813 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7814 }
7815 break;
7816 case IEMMODE_32BIT:
7817 switch (pVCpu->iem.s.enmEffAddrMode)
7818 {
7819 case IEMMODE_16BIT:
7820 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7821 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7822 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7823 iemCImpl_repne_scas_eax_m16);
7824 case IEMMODE_32BIT:
7825 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7826 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7827 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7828 iemCImpl_repne_scas_eax_m32);
7829 case IEMMODE_64BIT:
7830 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7831 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7832 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7833 iemCImpl_repne_scas_eax_m64);
7834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7835 }
7836 case IEMMODE_64BIT:
7837 switch (pVCpu->iem.s.enmEffAddrMode)
7838 {
7839 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7840 case IEMMODE_32BIT:
7841 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7842 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7843 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7844 iemCImpl_repne_scas_rax_m32);
7845 case IEMMODE_64BIT:
7846 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7847 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7848 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7849 iemCImpl_repne_scas_rax_m64);
7850 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7851 }
7852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7853 }
7854 }
7855
7856 /*
7857 * Annoying double switch here.
7858 * Using ugly macro for implementing the cases, sharing it with scasb.
7859 */
7860 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7861 switch (pVCpu->iem.s.enmEffOpSize)
7862 {
7863 case IEMMODE_16BIT:
7864 switch (pVCpu->iem.s.enmEffAddrMode)
7865 {
7866 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7867 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7868 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7869 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7870 }
7871 break;
7872
7873 case IEMMODE_32BIT:
7874 switch (pVCpu->iem.s.enmEffAddrMode)
7875 {
7876 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7877 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7878 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7880 }
7881 break;
7882
7883 case IEMMODE_64BIT:
7884 switch (pVCpu->iem.s.enmEffAddrMode)
7885 {
7886 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7887 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7888 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7890 }
7891 break;
7892 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7893 }
7894}
7895
7896#undef IEM_SCAS_CASE
7897
7898/**
7899 * Common 'mov r8, imm8' helper.
7900 */
7901FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7902{
7903 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7904 IEM_MC_BEGIN(0, 0, 0, 0);
7905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7906 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7907 IEM_MC_ADVANCE_RIP_AND_FINISH();
7908 IEM_MC_END();
7909}
7910
7911
7912/**
7913 * @opcode 0xb0
7914 */
7915FNIEMOP_DEF(iemOp_mov_AL_Ib)
7916{
7917 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7918 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7919}
7920
7921
7922/**
7923 * @opcode 0xb1
7924 */
7925FNIEMOP_DEF(iemOp_CL_Ib)
7926{
7927 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7928 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7929}
7930
7931
7932/**
7933 * @opcode 0xb2
7934 */
7935FNIEMOP_DEF(iemOp_DL_Ib)
7936{
7937 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7938 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7939}
7940
7941
7942/**
7943 * @opcode 0xb3
7944 */
7945FNIEMOP_DEF(iemOp_BL_Ib)
7946{
7947 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7948 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7949}
7950
7951
7952/**
7953 * @opcode 0xb4
7954 */
7955FNIEMOP_DEF(iemOp_mov_AH_Ib)
7956{
7957 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7958 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7959}
7960
7961
7962/**
7963 * @opcode 0xb5
7964 */
7965FNIEMOP_DEF(iemOp_CH_Ib)
7966{
7967 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7968 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7969}
7970
7971
7972/**
7973 * @opcode 0xb6
7974 */
7975FNIEMOP_DEF(iemOp_DH_Ib)
7976{
7977 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7978 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7979}
7980
7981
7982/**
7983 * @opcode 0xb7
7984 */
7985FNIEMOP_DEF(iemOp_BH_Ib)
7986{
7987 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7988 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7989}
7990
7991
7992/**
7993 * Common 'mov regX,immX' helper.
7994 */
7995FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7996{
7997 switch (pVCpu->iem.s.enmEffOpSize)
7998 {
7999 case IEMMODE_16BIT:
8000 IEM_MC_BEGIN(0, 0, 0, 0);
8001 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8003 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
8004 IEM_MC_ADVANCE_RIP_AND_FINISH();
8005 IEM_MC_END();
8006 break;
8007
8008 case IEMMODE_32BIT:
8009 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8010 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8012 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8013 IEM_MC_ADVANCE_RIP_AND_FINISH();
8014 IEM_MC_END();
8015 break;
8016
8017 case IEMMODE_64BIT:
8018 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8019 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8021 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8022 IEM_MC_ADVANCE_RIP_AND_FINISH();
8023 IEM_MC_END();
8024 break;
8025 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8026 }
8027}
8028
8029
8030/**
8031 * @opcode 0xb8
8032 */
8033FNIEMOP_DEF(iemOp_eAX_Iv)
8034{
8035 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8036 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8037}
8038
8039
8040/**
8041 * @opcode 0xb9
8042 */
8043FNIEMOP_DEF(iemOp_eCX_Iv)
8044{
8045 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8046 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8047}
8048
8049
8050/**
8051 * @opcode 0xba
8052 */
8053FNIEMOP_DEF(iemOp_eDX_Iv)
8054{
8055 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8056 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8057}
8058
8059
8060/**
8061 * @opcode 0xbb
8062 */
8063FNIEMOP_DEF(iemOp_eBX_Iv)
8064{
8065 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8066 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8067}
8068
8069
8070/**
8071 * @opcode 0xbc
8072 */
8073FNIEMOP_DEF(iemOp_eSP_Iv)
8074{
8075 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8076 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8077}
8078
8079
8080/**
8081 * @opcode 0xbd
8082 */
8083FNIEMOP_DEF(iemOp_eBP_Iv)
8084{
8085 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8086 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8087}
8088
8089
8090/**
8091 * @opcode 0xbe
8092 */
8093FNIEMOP_DEF(iemOp_eSI_Iv)
8094{
8095 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8096 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8097}
8098
8099
8100/**
8101 * @opcode 0xbf
8102 */
8103FNIEMOP_DEF(iemOp_eDI_Iv)
8104{
8105 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8106 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8107}
8108
8109
8110/**
8111 * @opcode 0xc0
8112 */
8113FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8114{
8115 IEMOP_HLP_MIN_186();
8116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8117 PCIEMOPSHIFTSIZES pImpl;
8118 switch (IEM_GET_MODRM_REG_8(bRm))
8119 {
8120 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
8121 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
8122 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
8123 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
8124 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
8125 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
8126 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
8127 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8128 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8129 }
8130 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8131
8132 if (IEM_IS_MODRM_REG_MODE(bRm))
8133 {
8134 /* register */
8135 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8136 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8138 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8139 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8140 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8141 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8142 IEM_MC_REF_EFLAGS(pEFlags);
8143 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8144 IEM_MC_ADVANCE_RIP_AND_FINISH();
8145 IEM_MC_END();
8146 }
8147 else
8148 {
8149 /* memory */
8150 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
8151 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8153
8154 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8156
8157 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8158 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8159 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8160
8161 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8162 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8163 IEM_MC_FETCH_EFLAGS(EFlags);
8164 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8165
8166 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8167 IEM_MC_COMMIT_EFLAGS(EFlags);
8168 IEM_MC_ADVANCE_RIP_AND_FINISH();
8169 IEM_MC_END();
8170 }
8171}
8172
8173
8174/**
8175 * @opcode 0xc1
8176 */
8177FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8178{
8179 IEMOP_HLP_MIN_186();
8180 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8181 PCIEMOPSHIFTSIZES pImpl;
8182 switch (IEM_GET_MODRM_REG_8(bRm))
8183 {
8184 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
8185 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
8186 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
8187 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
8188 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
8189 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
8190 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
8191 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8192 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8193 }
8194 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8195
8196 if (IEM_IS_MODRM_REG_MODE(bRm))
8197 {
8198 /* register */
8199 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8200 switch (pVCpu->iem.s.enmEffOpSize)
8201 {
8202 case IEMMODE_16BIT:
8203 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8205 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8206 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8207 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8208 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8209 IEM_MC_REF_EFLAGS(pEFlags);
8210 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8211 IEM_MC_ADVANCE_RIP_AND_FINISH();
8212 IEM_MC_END();
8213 break;
8214
8215 case IEMMODE_32BIT:
8216 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8218 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8219 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8220 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8221 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8222 IEM_MC_REF_EFLAGS(pEFlags);
8223 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8224 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8225 IEM_MC_ADVANCE_RIP_AND_FINISH();
8226 IEM_MC_END();
8227 break;
8228
8229 case IEMMODE_64BIT:
8230 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8232 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8233 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8234 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8235 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8236 IEM_MC_REF_EFLAGS(pEFlags);
8237 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8238 IEM_MC_ADVANCE_RIP_AND_FINISH();
8239 IEM_MC_END();
8240 break;
8241
8242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8243 }
8244 }
8245 else
8246 {
8247 /* memory */
8248 switch (pVCpu->iem.s.enmEffOpSize)
8249 {
8250 case IEMMODE_16BIT:
8251 IEM_MC_BEGIN(3, 3, 0, 0);
8252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8254
8255 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8257
8258 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8259 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8260 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8261
8262 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8263 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8264 IEM_MC_FETCH_EFLAGS(EFlags);
8265 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8266
8267 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8268 IEM_MC_COMMIT_EFLAGS(EFlags);
8269 IEM_MC_ADVANCE_RIP_AND_FINISH();
8270 IEM_MC_END();
8271 break;
8272
8273 case IEMMODE_32BIT:
8274 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8276 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8277
8278 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8280
8281 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8282 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8283 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8284
8285 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8286 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8287 IEM_MC_FETCH_EFLAGS(EFlags);
8288 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8289
8290 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8291 IEM_MC_COMMIT_EFLAGS(EFlags);
8292 IEM_MC_ADVANCE_RIP_AND_FINISH();
8293 IEM_MC_END();
8294 break;
8295
8296 case IEMMODE_64BIT:
8297 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8298 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8299 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8300
8301 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8303
8304 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8305 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8306 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8307
8308 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8309 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8310 IEM_MC_FETCH_EFLAGS(EFlags);
8311 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8312
8313 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8314 IEM_MC_COMMIT_EFLAGS(EFlags);
8315 IEM_MC_ADVANCE_RIP_AND_FINISH();
8316 IEM_MC_END();
8317 break;
8318
8319 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8320 }
8321 }
8322}
8323
8324
8325/**
8326 * @opcode 0xc2
8327 */
8328FNIEMOP_DEF(iemOp_retn_Iw)
8329{
8330 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8331 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8332 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8334 switch (pVCpu->iem.s.enmEffOpSize)
8335 {
8336 case IEMMODE_16BIT:
8337 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_16, u16Imm);
8338 case IEMMODE_32BIT:
8339 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_32, u16Imm);
8340 case IEMMODE_64BIT:
8341 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_64, u16Imm);
8342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8343 }
8344}
8345
8346
8347/**
8348 * @opcode 0xc3
8349 */
8350FNIEMOP_DEF(iemOp_retn)
8351{
8352 IEMOP_MNEMONIC(retn, "retn");
8353 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8355 switch (pVCpu->iem.s.enmEffOpSize)
8356 {
8357 case IEMMODE_16BIT:
8358 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_16);
8359 case IEMMODE_32BIT:
8360 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_32);
8361 case IEMMODE_64BIT:
8362 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_64);
8363 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8364 }
8365}
8366
8367
8368/**
8369 * @opcode 0xc4
8370 */
8371FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
8372{
8373 /* The LDS instruction is invalid 64-bit mode. In legacy and
8374 compatability mode it is invalid with MOD=3.
8375 The use as a VEX prefix is made possible by assigning the inverted
8376 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
8377 outside of 64-bit mode. VEX is not available in real or v86 mode. */
8378 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8379 if ( IEM_IS_64BIT_CODE(pVCpu)
8380 || IEM_IS_MODRM_REG_MODE(bRm) )
8381 {
8382 IEMOP_MNEMONIC(vex3_prefix, "vex3");
8383 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8384 {
8385 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8386 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8387 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
8388 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8389 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8390 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
8391 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
8392 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8393 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
8394 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
8395 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
8396 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
8397 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
8398
8399 switch (bRm & 0x1f)
8400 {
8401 case 1: /* 0x0f lead opcode byte. */
8402#ifdef IEM_WITH_VEX
8403 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8404#else
8405 IEMOP_BITCH_ABOUT_STUB();
8406 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8407#endif
8408
8409 case 2: /* 0x0f 0x38 lead opcode bytes. */
8410#ifdef IEM_WITH_VEX
8411 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8412#else
8413 IEMOP_BITCH_ABOUT_STUB();
8414 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8415#endif
8416
8417 case 3: /* 0x0f 0x3a lead opcode bytes. */
8418#ifdef IEM_WITH_VEX
8419 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8420#else
8421 IEMOP_BITCH_ABOUT_STUB();
8422 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8423#endif
8424
8425 default:
8426 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
8427 IEMOP_RAISE_INVALID_OPCODE_RET();
8428 }
8429 }
8430 Log(("VEX3: VEX support disabled!\n"));
8431 IEMOP_RAISE_INVALID_OPCODE_RET();
8432 }
8433
8434 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
8435 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
8436}
8437
8438
8439/**
8440 * @opcode 0xc5
8441 */
8442FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
8443{
8444 /* The LES instruction is invalid 64-bit mode. In legacy and
8445 compatability mode it is invalid with MOD=3.
8446 The use as a VEX prefix is made possible by assigning the inverted
8447 REX.R to the top MOD bit, and the top bit in the inverted register
8448 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
8449 to accessing registers 0..7 in this VEX form. */
8450 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8451 if ( IEM_IS_64BIT_CODE(pVCpu)
8452 || IEM_IS_MODRM_REG_MODE(bRm))
8453 {
8454 IEMOP_MNEMONIC(vex2_prefix, "vex2");
8455 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8456 {
8457 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8458 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8459 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8460 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8461 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8462 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
8463 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
8464 pVCpu->iem.s.idxPrefix = bRm & 0x3;
8465
8466#ifdef IEM_WITH_VEX
8467 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8468#else
8469 IEMOP_BITCH_ABOUT_STUB();
8470 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8471#endif
8472 }
8473
8474 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8475 Log(("VEX2: VEX support disabled!\n"));
8476 IEMOP_RAISE_INVALID_OPCODE_RET();
8477 }
8478
8479 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8480 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8481}
8482
8483
8484/**
8485 * @opcode 0xc6
8486 */
8487FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8488{
8489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8490 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8491 IEMOP_RAISE_INVALID_OPCODE_RET();
8492 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8493
8494 if (IEM_IS_MODRM_REG_MODE(bRm))
8495 {
8496 /* register access */
8497 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8498 IEM_MC_BEGIN(0, 0, 0, 0);
8499 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8500 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8501 IEM_MC_ADVANCE_RIP_AND_FINISH();
8502 IEM_MC_END();
8503 }
8504 else
8505 {
8506 /* memory access. */
8507 IEM_MC_BEGIN(0, 1, 0, 0);
8508 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8509 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8510 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8512 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8513 IEM_MC_ADVANCE_RIP_AND_FINISH();
8514 IEM_MC_END();
8515 }
8516}
8517
8518
8519/**
8520 * @opcode 0xc7
8521 */
8522FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8523{
8524 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8525 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
8526 IEMOP_RAISE_INVALID_OPCODE_RET();
8527 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8528
8529 if (IEM_IS_MODRM_REG_MODE(bRm))
8530 {
8531 /* register access */
8532 switch (pVCpu->iem.s.enmEffOpSize)
8533 {
8534 case IEMMODE_16BIT:
8535 IEM_MC_BEGIN(0, 0, 0, 0);
8536 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8538 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8539 IEM_MC_ADVANCE_RIP_AND_FINISH();
8540 IEM_MC_END();
8541 break;
8542
8543 case IEMMODE_32BIT:
8544 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8545 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8546 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8547 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8548 IEM_MC_ADVANCE_RIP_AND_FINISH();
8549 IEM_MC_END();
8550 break;
8551
8552 case IEMMODE_64BIT:
8553 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8554 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8556 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8557 IEM_MC_ADVANCE_RIP_AND_FINISH();
8558 IEM_MC_END();
8559 break;
8560
8561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8562 }
8563 }
8564 else
8565 {
8566 /* memory access. */
8567 switch (pVCpu->iem.s.enmEffOpSize)
8568 {
8569 case IEMMODE_16BIT:
8570 IEM_MC_BEGIN(0, 1, 0, 0);
8571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8573 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8575 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8576 IEM_MC_ADVANCE_RIP_AND_FINISH();
8577 IEM_MC_END();
8578 break;
8579
8580 case IEMMODE_32BIT:
8581 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8584 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8586 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8587 IEM_MC_ADVANCE_RIP_AND_FINISH();
8588 IEM_MC_END();
8589 break;
8590
8591 case IEMMODE_64BIT:
8592 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8593 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8595 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8597 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8598 IEM_MC_ADVANCE_RIP_AND_FINISH();
8599 IEM_MC_END();
8600 break;
8601
8602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8603 }
8604 }
8605}
8606
8607
8608
8609
8610/**
8611 * @opcode 0xc8
8612 */
8613FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8614{
8615 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8616 IEMOP_HLP_MIN_186();
8617 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8618 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8619 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8621 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
8622 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8623 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8624 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8625}
8626
8627
8628/**
8629 * @opcode 0xc9
8630 */
8631FNIEMOP_DEF(iemOp_leave)
8632{
8633 IEMOP_MNEMONIC(leave, "leave");
8634 IEMOP_HLP_MIN_186();
8635 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8637 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
8638 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8639 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8640 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8641}
8642
8643
8644/**
8645 * @opcode 0xca
8646 */
8647FNIEMOP_DEF(iemOp_retf_Iw)
8648{
8649 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8650 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8652 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8653 | IEM_CIMPL_F_MODE,
8654 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8655 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8656 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8657 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8658 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8659 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8660 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8661 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8662 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8663 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8664 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8665 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8666 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8667}
8668
8669
8670/**
8671 * @opcode 0xcb
8672 */
8673FNIEMOP_DEF(iemOp_retf)
8674{
8675 IEMOP_MNEMONIC(retf, "retf");
8676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8677 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8678 | IEM_CIMPL_F_MODE,
8679 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8680 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8681 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8682 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8683 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8684 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8685 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8686 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8687 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8688 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8689 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8690 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8691 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8692}
8693
8694
8695/**
8696 * @opcode 0xcc
8697 */
8698FNIEMOP_DEF(iemOp_int3)
8699{
8700 IEMOP_MNEMONIC(int3, "int3");
8701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8702 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8703 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
8704 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8705}
8706
8707
8708/**
8709 * @opcode 0xcd
8710 */
8711FNIEMOP_DEF(iemOp_int_Ib)
8712{
8713 IEMOP_MNEMONIC(int_Ib, "int Ib");
8714 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8716 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8717 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
8718 iemCImpl_int, u8Int, IEMINT_INTN);
8719 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8720}
8721
8722
8723/**
8724 * @opcode 0xce
8725 */
8726FNIEMOP_DEF(iemOp_into)
8727{
8728 IEMOP_MNEMONIC(into, "into");
8729 IEMOP_HLP_NO_64BIT();
8730 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8731 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8732 UINT64_MAX,
8733 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8734 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8735}
8736
8737
8738/**
8739 * @opcode 0xcf
8740 */
8741FNIEMOP_DEF(iemOp_iret)
8742{
8743 IEMOP_MNEMONIC(iret, "iret");
8744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8745 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8746 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8747 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8748 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8749 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8750 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8751 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8752 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8753 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8754 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8755 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8756 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8757 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8758 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8759 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8760 /* Segment registers are sanitized when returning to an outer ring, or fully
8761 reloaded when returning to v86 mode. Thus the large flush list above. */
8762}
8763
8764
8765/**
8766 * @opcode 0xd0
8767 */
8768FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8769{
8770 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8771 PCIEMOPSHIFTSIZES pImpl;
8772 switch (IEM_GET_MODRM_REG_8(bRm))
8773 {
8774 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8775 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8776 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8777 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8778 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8779 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8780 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8781 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8782 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8783 }
8784 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8785
8786 if (IEM_IS_MODRM_REG_MODE(bRm))
8787 {
8788 /* register */
8789 IEM_MC_BEGIN(3, 0, 0, 0);
8790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8791 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8792 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8793 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8794 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8795 IEM_MC_REF_EFLAGS(pEFlags);
8796 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8797 IEM_MC_ADVANCE_RIP_AND_FINISH();
8798 IEM_MC_END();
8799 }
8800 else
8801 {
8802 /* memory */
8803 IEM_MC_BEGIN(3, 3, 0, 0);
8804 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8805 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8806 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8807 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8808 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8809
8810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8811 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8812 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8813 IEM_MC_FETCH_EFLAGS(EFlags);
8814 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8815
8816 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
8817 IEM_MC_COMMIT_EFLAGS(EFlags);
8818 IEM_MC_ADVANCE_RIP_AND_FINISH();
8819 IEM_MC_END();
8820 }
8821}
8822
8823
8824
8825/**
8826 * @opcode 0xd1
8827 */
8828FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8829{
8830 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8831 PCIEMOPSHIFTSIZES pImpl;
8832 switch (IEM_GET_MODRM_REG_8(bRm))
8833 {
8834 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8835 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8836 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8837 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8838 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8839 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8840 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8841 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8842 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8843 }
8844 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8845
8846 if (IEM_IS_MODRM_REG_MODE(bRm))
8847 {
8848 /* register */
8849 switch (pVCpu->iem.s.enmEffOpSize)
8850 {
8851 case IEMMODE_16BIT:
8852 IEM_MC_BEGIN(3, 0, 0, 0);
8853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8854 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8855 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8856 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8857 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8858 IEM_MC_REF_EFLAGS(pEFlags);
8859 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8860 IEM_MC_ADVANCE_RIP_AND_FINISH();
8861 IEM_MC_END();
8862 break;
8863
8864 case IEMMODE_32BIT:
8865 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8867 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8868 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8869 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8870 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8871 IEM_MC_REF_EFLAGS(pEFlags);
8872 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8873 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8874 IEM_MC_ADVANCE_RIP_AND_FINISH();
8875 IEM_MC_END();
8876 break;
8877
8878 case IEMMODE_64BIT:
8879 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8880 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8881 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8882 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8883 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8884 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8885 IEM_MC_REF_EFLAGS(pEFlags);
8886 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8887 IEM_MC_ADVANCE_RIP_AND_FINISH();
8888 IEM_MC_END();
8889 break;
8890
8891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8892 }
8893 }
8894 else
8895 {
8896 /* memory */
8897 switch (pVCpu->iem.s.enmEffOpSize)
8898 {
8899 case IEMMODE_16BIT:
8900 IEM_MC_BEGIN(3, 3, 0, 0);
8901 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8902 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8903 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8904 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8905 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8906
8907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8909 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8910 IEM_MC_FETCH_EFLAGS(EFlags);
8911 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8912
8913 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
8914 IEM_MC_COMMIT_EFLAGS(EFlags);
8915 IEM_MC_ADVANCE_RIP_AND_FINISH();
8916 IEM_MC_END();
8917 break;
8918
8919 case IEMMODE_32BIT:
8920 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8921 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8922 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8923 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8924 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8925 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8926
8927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8929 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8930 IEM_MC_FETCH_EFLAGS(EFlags);
8931 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8932
8933 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
8934 IEM_MC_COMMIT_EFLAGS(EFlags);
8935 IEM_MC_ADVANCE_RIP_AND_FINISH();
8936 IEM_MC_END();
8937 break;
8938
8939 case IEMMODE_64BIT:
8940 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8941 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8942 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8943 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8945 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8946
8947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8949 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8950 IEM_MC_FETCH_EFLAGS(EFlags);
8951 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8952
8953 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
8954 IEM_MC_COMMIT_EFLAGS(EFlags);
8955 IEM_MC_ADVANCE_RIP_AND_FINISH();
8956 IEM_MC_END();
8957 break;
8958
8959 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8960 }
8961 }
8962}
8963
8964
8965/**
8966 * @opcode 0xd2
8967 */
8968FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8969{
8970 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8971 PCIEMOPSHIFTSIZES pImpl;
8972 switch (IEM_GET_MODRM_REG_8(bRm))
8973 {
8974 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8975 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8976 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8977 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8978 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8979 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8980 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8981 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8982 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8983 }
8984 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8985
8986 if (IEM_IS_MODRM_REG_MODE(bRm))
8987 {
8988 /* register */
8989 IEM_MC_BEGIN(3, 0, 0, 0);
8990 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8991 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8992 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8993 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8994 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8995 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8996 IEM_MC_REF_EFLAGS(pEFlags);
8997 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8998 IEM_MC_ADVANCE_RIP_AND_FINISH();
8999 IEM_MC_END();
9000 }
9001 else
9002 {
9003 /* memory */
9004 IEM_MC_BEGIN(3, 3, 0, 0);
9005 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9006 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9007 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9008 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9009 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9010
9011 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9013 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9014 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9015 IEM_MC_FETCH_EFLAGS(EFlags);
9016 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9017
9018 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo);
9019 IEM_MC_COMMIT_EFLAGS(EFlags);
9020 IEM_MC_ADVANCE_RIP_AND_FINISH();
9021 IEM_MC_END();
9022 }
9023}
9024
9025
9026/**
9027 * @opcode 0xd3
9028 */
9029FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
9030{
9031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9032 PCIEMOPSHIFTSIZES pImpl;
9033 switch (IEM_GET_MODRM_REG_8(bRm))
9034 {
9035 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
9036 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
9037 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
9038 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
9039 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
9040 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
9041 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
9042 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9043 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
9044 }
9045 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
9046
9047 if (IEM_IS_MODRM_REG_MODE(bRm))
9048 {
9049 /* register */
9050 switch (pVCpu->iem.s.enmEffOpSize)
9051 {
9052 case IEMMODE_16BIT:
9053 IEM_MC_BEGIN(3, 0, 0, 0);
9054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9055 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9056 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9057 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9058 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9059 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9060 IEM_MC_REF_EFLAGS(pEFlags);
9061 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9062 IEM_MC_ADVANCE_RIP_AND_FINISH();
9063 IEM_MC_END();
9064 break;
9065
9066 case IEMMODE_32BIT:
9067 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
9068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9069 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9070 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9071 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9072 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9073 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9074 IEM_MC_REF_EFLAGS(pEFlags);
9075 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9076 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9077 IEM_MC_ADVANCE_RIP_AND_FINISH();
9078 IEM_MC_END();
9079 break;
9080
9081 case IEMMODE_64BIT:
9082 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
9083 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9084 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9085 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9086 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9087 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9088 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9089 IEM_MC_REF_EFLAGS(pEFlags);
9090 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9091 IEM_MC_ADVANCE_RIP_AND_FINISH();
9092 IEM_MC_END();
9093 break;
9094
9095 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9096 }
9097 }
9098 else
9099 {
9100 /* memory */
9101 switch (pVCpu->iem.s.enmEffOpSize)
9102 {
9103 case IEMMODE_16BIT:
9104 IEM_MC_BEGIN(3, 3, 0, 0);
9105 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9106 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9107 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9108 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9109 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9110
9111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9113 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9114 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9115 IEM_MC_FETCH_EFLAGS(EFlags);
9116 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9117
9118 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo);
9119 IEM_MC_COMMIT_EFLAGS(EFlags);
9120 IEM_MC_ADVANCE_RIP_AND_FINISH();
9121 IEM_MC_END();
9122 break;
9123
9124 case IEMMODE_32BIT:
9125 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
9126 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9127 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9128 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9129 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9130 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9131
9132 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9134 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9135 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9136 IEM_MC_FETCH_EFLAGS(EFlags);
9137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9138
9139 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo);
9140 IEM_MC_COMMIT_EFLAGS(EFlags);
9141 IEM_MC_ADVANCE_RIP_AND_FINISH();
9142 IEM_MC_END();
9143 break;
9144
9145 case IEMMODE_64BIT:
9146 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
9147 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9148 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9149 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9151 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9152
9153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9155 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9156 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9157 IEM_MC_FETCH_EFLAGS(EFlags);
9158 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9159
9160 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo);
9161 IEM_MC_COMMIT_EFLAGS(EFlags);
9162 IEM_MC_ADVANCE_RIP_AND_FINISH();
9163 IEM_MC_END();
9164 break;
9165
9166 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9167 }
9168 }
9169}
9170
9171/**
9172 * @opcode 0xd4
9173 */
9174FNIEMOP_DEF(iemOp_aam_Ib)
9175{
9176 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
9177 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9178 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9179 IEMOP_HLP_NO_64BIT();
9180 if (!bImm)
9181 IEMOP_RAISE_DIVIDE_ERROR_RET();
9182 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
9183}
9184
9185
9186/**
9187 * @opcode 0xd5
9188 */
9189FNIEMOP_DEF(iemOp_aad_Ib)
9190{
9191 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
9192 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9194 IEMOP_HLP_NO_64BIT();
9195 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
9196}
9197
9198
9199/**
9200 * @opcode 0xd6
9201 */
9202FNIEMOP_DEF(iemOp_salc)
9203{
9204 IEMOP_MNEMONIC(salc, "salc");
9205 IEMOP_HLP_NO_64BIT();
9206
9207 IEM_MC_BEGIN(0, 0, 0, 0);
9208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9209 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9210 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
9211 } IEM_MC_ELSE() {
9212 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
9213 } IEM_MC_ENDIF();
9214 IEM_MC_ADVANCE_RIP_AND_FINISH();
9215 IEM_MC_END();
9216}
9217
9218
9219/**
9220 * @opcode 0xd7
9221 */
9222FNIEMOP_DEF(iemOp_xlat)
9223{
9224 IEMOP_MNEMONIC(xlat, "xlat");
9225 switch (pVCpu->iem.s.enmEffAddrMode)
9226 {
9227 case IEMMODE_16BIT:
9228 IEM_MC_BEGIN(2, 0, 0, 0);
9229 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9230 IEM_MC_LOCAL(uint8_t, u8Tmp);
9231 IEM_MC_LOCAL(uint16_t, u16Addr);
9232 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
9233 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
9234 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
9235 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9236 IEM_MC_ADVANCE_RIP_AND_FINISH();
9237 IEM_MC_END();
9238 break;
9239
9240 case IEMMODE_32BIT:
9241 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
9242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9243 IEM_MC_LOCAL(uint8_t, u8Tmp);
9244 IEM_MC_LOCAL(uint32_t, u32Addr);
9245 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
9246 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
9247 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
9248 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9249 IEM_MC_ADVANCE_RIP_AND_FINISH();
9250 IEM_MC_END();
9251 break;
9252
9253 case IEMMODE_64BIT:
9254 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
9255 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9256 IEM_MC_LOCAL(uint8_t, u8Tmp);
9257 IEM_MC_LOCAL(uint64_t, u64Addr);
9258 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
9259 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
9260 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
9261 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9262 IEM_MC_ADVANCE_RIP_AND_FINISH();
9263 IEM_MC_END();
9264 break;
9265
9266 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9267 }
9268}
9269
9270
9271/**
9272 * Common worker for FPU instructions working on ST0 and STn, and storing the
9273 * result in ST0.
9274 *
9275 * @param bRm Mod R/M byte.
9276 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9277 */
9278FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9279{
9280 IEM_MC_BEGIN(3, 1, 0, 0);
9281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9282 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9283 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9284 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9285 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9286
9287 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9288 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9289 IEM_MC_PREPARE_FPU_USAGE();
9290 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9291 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9292 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9293 } IEM_MC_ELSE() {
9294 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9295 } IEM_MC_ENDIF();
9296 IEM_MC_ADVANCE_RIP_AND_FINISH();
9297
9298 IEM_MC_END();
9299}
9300
9301
9302/**
9303 * Common worker for FPU instructions working on ST0 and STn, and only affecting
9304 * flags.
9305 *
9306 * @param bRm Mod R/M byte.
9307 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9308 */
9309FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9310{
9311 IEM_MC_BEGIN(3, 1, 0, 0);
9312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9313 IEM_MC_LOCAL(uint16_t, u16Fsw);
9314 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9315 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9316 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9317
9318 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9319 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9320 IEM_MC_PREPARE_FPU_USAGE();
9321 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9322 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9323 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9324 } IEM_MC_ELSE() {
9325 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9326 } IEM_MC_ENDIF();
9327 IEM_MC_ADVANCE_RIP_AND_FINISH();
9328
9329 IEM_MC_END();
9330}
9331
9332
9333/**
9334 * Common worker for FPU instructions working on ST0 and STn, only affecting
9335 * flags, and popping when done.
9336 *
9337 * @param bRm Mod R/M byte.
9338 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9339 */
9340FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9341{
9342 IEM_MC_BEGIN(3, 1, 0, 0);
9343 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9344 IEM_MC_LOCAL(uint16_t, u16Fsw);
9345 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9346 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9347 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9348
9349 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9350 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9351 IEM_MC_PREPARE_FPU_USAGE();
9352 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9353 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9354 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9355 } IEM_MC_ELSE() {
9356 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9357 } IEM_MC_ENDIF();
9358 IEM_MC_ADVANCE_RIP_AND_FINISH();
9359
9360 IEM_MC_END();
9361}
9362
9363
9364/** Opcode 0xd8 11/0. */
9365FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
9366{
9367 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
9368 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
9369}
9370
9371
9372/** Opcode 0xd8 11/1. */
9373FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
9374{
9375 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
9376 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
9377}
9378
9379
9380/** Opcode 0xd8 11/2. */
9381FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
9382{
9383 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
9384 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
9385}
9386
9387
9388/** Opcode 0xd8 11/3. */
9389FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
9390{
9391 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
9392 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
9393}
9394
9395
9396/** Opcode 0xd8 11/4. */
9397FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
9398{
9399 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
9400 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
9401}
9402
9403
9404/** Opcode 0xd8 11/5. */
9405FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
9406{
9407 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
9408 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
9409}
9410
9411
9412/** Opcode 0xd8 11/6. */
9413FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
9414{
9415 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
9416 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
9417}
9418
9419
9420/** Opcode 0xd8 11/7. */
9421FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
9422{
9423 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
9424 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
9425}
9426
9427
9428/**
9429 * Common worker for FPU instructions working on ST0 and an m32r, and storing
9430 * the result in ST0.
9431 *
9432 * @param bRm Mod R/M byte.
9433 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9434 */
9435FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
9436{
9437 IEM_MC_BEGIN(3, 3, 0, 0);
9438 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9439 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9440 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
9441 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9442 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9443 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
9444
9445 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9447
9448 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9449 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9450 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9451
9452 IEM_MC_PREPARE_FPU_USAGE();
9453 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9454 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
9455 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9456 } IEM_MC_ELSE() {
9457 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9458 } IEM_MC_ENDIF();
9459 IEM_MC_ADVANCE_RIP_AND_FINISH();
9460
9461 IEM_MC_END();
9462}
9463
9464
9465/** Opcode 0xd8 !11/0. */
9466FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
9467{
9468 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
9469 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
9470}
9471
9472
9473/** Opcode 0xd8 !11/1. */
9474FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
9475{
9476 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
9477 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
9478}
9479
9480
9481/** Opcode 0xd8 !11/2. */
9482FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
9483{
9484 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
9485
9486 IEM_MC_BEGIN(3, 3, 0, 0);
9487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9488 IEM_MC_LOCAL(uint16_t, u16Fsw);
9489 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
9490 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9491 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9492 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
9493
9494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9496
9497 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9498 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9499 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9500
9501 IEM_MC_PREPARE_FPU_USAGE();
9502 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9503 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
9504 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9505 } IEM_MC_ELSE() {
9506 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9507 } IEM_MC_ENDIF();
9508 IEM_MC_ADVANCE_RIP_AND_FINISH();
9509
9510 IEM_MC_END();
9511}
9512
9513
9514/** Opcode 0xd8 !11/3. */
9515FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
9516{
9517 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
9518
9519 IEM_MC_BEGIN(3, 3, 0, 0);
9520 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9521 IEM_MC_LOCAL(uint16_t, u16Fsw);
9522 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
9523 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9524 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9525 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
9526
9527 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9529
9530 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9531 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9532 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9533
9534 IEM_MC_PREPARE_FPU_USAGE();
9535 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9536 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
9537 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9538 } IEM_MC_ELSE() {
9539 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
9540 } IEM_MC_ENDIF();
9541 IEM_MC_ADVANCE_RIP_AND_FINISH();
9542
9543 IEM_MC_END();
9544}
9545
9546
/** Opcode 0xd8 !11/4.
 * FSUB m32real: ST(0) = ST(0) - m32real, via the common st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5.
 * FSUBR m32real: ST(0) = m32real - ST(0) (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6.
 * FDIV m32real: ST(0) = ST(0) / m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7.
 * FDIVR m32real: ST(0) = m32real / ST(0) (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9577
9578
9579/**
9580 * @opcode 0xd8
9581 */
9582FNIEMOP_DEF(iemOp_EscF0)
9583{
9584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9585 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9586
9587 if (IEM_IS_MODRM_REG_MODE(bRm))
9588 {
9589 switch (IEM_GET_MODRM_REG_8(bRm))
9590 {
9591 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
9592 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
9593 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
9594 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9595 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
9596 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9597 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
9598 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9599 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9600 }
9601 }
9602 else
9603 {
9604 switch (IEM_GET_MODRM_REG_8(bRm))
9605 {
9606 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
9607 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
9608 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
9609 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9610 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
9611 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9612 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
9613 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9614 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9615 }
9616 }
9617}
9618
9619
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: convert the 32-bit real memory operand to 80-bit and push it
 * onto the FPU register stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push goes into ST(7) relative to the current top; it must be empty,
       otherwise signal stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9650
9651
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: round ST(0) to 32-bit real and store it to memory; the stack
 * is not popped. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so memory faults are raised
       before the FPU state is modified. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked a negative QNaN is stored. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9685
9686
/** Opcode 0xd9 !11/3
 * FSTP m32real: like FST m32real but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching FPU state. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked a negative QNaN is stored, then pop. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9720
9721
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: load the FPU environment (CW, SW, TW, FPUIP/FPUDP etc.)
 * from memory; the layout depends on the effective operand size, hence the
 * C-implementation call with enmEffOpSize. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,      1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9739
9740
9741/** Opcode 0xd9 !11/5 */
9742FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9743{
9744 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9745 IEM_MC_BEGIN(1, 1, 0, 0);
9746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9747 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9748
9749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9750 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9751 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9752
9753 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9754 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9755
9756 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
9757 IEM_MC_END();
9758}
9759
9760
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte: store the FPU environment to memory without checking
 * for pending FPU exceptions (no-wait form); layout depends on operand size. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,      1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9778
9779
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: store the current FPU control word to memory (no-wait form,
 * no pending-exception check). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9796
9797
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: performs no FPU operation; still raises \#NM / pending \#MF and
 * updates the FPU opcode and instruction pointer fields. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9813
9814
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): push a copy of register ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register must be occupied; otherwise push a stack-underflow QNaN. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9839
9840
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchange the contents of ST(0) and ST(i).  The underflow case
 * (either register empty) is handled by a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t,          uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Both registers occupied: swap values; C1 is cleared via the FSW in FpuRes. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9869
9870
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copy ST(0) to ST(i) and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no copy needed, just pop (or underflow). */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9917
9918
9919/**
9920 * Common worker for FPU instructions working on ST0 and replaces it with the
9921 * result, i.e. unary operators.
9922 *
9923 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9924 */
9925FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
9926{
9927 IEM_MC_BEGIN(2, 1, 0, 0);
9928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9929 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9930 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9931 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9932
9933 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9934 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9935 IEM_MC_PREPARE_FPU_USAGE();
9936 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9937 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
9938 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9939 } IEM_MC_ELSE() {
9940 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9941 } IEM_MC_ENDIF();
9942 IEM_MC_ADVANCE_RIP_AND_FINISH();
9943
9944 IEM_MC_END();
9945}
9946
9947
/** Opcode 0xd9 0xe0.
 * FCHS: negate the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1.
 * FABS: clear the sign of ST(0), yielding its absolute value. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9962
9963
/** Opcode 0xd9 0xe4.
 * FTST: compare ST(0) against 0.0 and set the FSW condition code bits. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9987
9988
/** Opcode 0xd9 0xe5.
 * FXAM: classify the value in ST(0) into the FSW condition codes.  Works on
 * empty registers too (reports 'empty'), hence IEM_MC_REF_FPUREG is used
 * unconditionally rather than the not-empty check. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10009
10010
10011/**
10012 * Common worker for FPU instructions pushing a constant onto the FPU stack.
10013 *
10014 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10015 */
10016FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
10017{
10018 IEM_MC_BEGIN(1, 1, 0, 0);
10019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10020 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10021 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10022
10023 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10024 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10025 IEM_MC_PREPARE_FPU_USAGE();
10026 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10027 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10028 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10029 } IEM_MC_ELSE() {
10030 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
10031 } IEM_MC_ENDIF();
10032 IEM_MC_ADVANCE_RIP_AND_FINISH();
10033
10034 IEM_MC_END();
10035}
10036
10037
/** Opcode 0xd9 0xe8.
 * FLD1: push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9.
 * FLDL2T: push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea.
 * FLDL2E: push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb.
 * FLDPI: push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec.
 * FLDLG2: push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed.
 * FLDLN2: push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee.
 * FLDZ: push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10090
10091
/** Opcode 0xd9 0xf0.
 *
 * F2XM1: ST(0) = 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10105
10106
10107/**
10108 * Common worker for FPU instructions working on STn and ST0, storing the result
10109 * in STn, and popping the stack unless IE, DE or ZE was raised.
10110 *
10111 * @param bRm Mod R/M byte.
10112 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10113 */
10114FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10115{
10116 IEM_MC_BEGIN(3, 1, 0, 0);
10117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10118 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10119 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10120 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10121 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10122
10123 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10124 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10125
10126 IEM_MC_PREPARE_FPU_USAGE();
10127 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10128 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10129 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10130 } IEM_MC_ELSE() {
10131 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10132 } IEM_MC_ENDIF();
10133 IEM_MC_ADVANCE_RIP_AND_FINISH();
10134
10135 IEM_MC_END();
10136}
10137
10138
/** Opcode 0xd9 0xf1.
 * FYL2X: ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10145
10146
10147/**
10148 * Common worker for FPU instructions working on ST0 and having two outputs, one
10149 * replacing ST0 and one pushed onto the stack.
10150 *
10151 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10152 */
10153FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
10154{
10155 IEM_MC_BEGIN(2, 1, 0, 0);
10156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10157 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
10158 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
10159 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10160
10161 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10162 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10163 IEM_MC_PREPARE_FPU_USAGE();
10164 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10165 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
10166 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
10167 } IEM_MC_ELSE() {
10168 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
10169 } IEM_MC_ENDIF();
10170 IEM_MC_ADVANCE_RIP_AND_FINISH();
10171
10172 IEM_MC_END();
10173}
10174
10175
/** Opcode 0xd9 0xf2.
 * FPTAN: ST(0) = tan(ST(0)), then push 1.0 (two-output form). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3.
 * FPATAN: ST(1) = arctan(ST(1)/ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4.
 * FXTRACT: split ST(0) into exponent (replaces ST(0)) and pushed significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5.
 * FPREM1: IEEE partial remainder of ST(0) / ST(1), stored in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10206
10207
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrement the FPU stack top pointer (TOP field in FSW); no tag or
 * register contents are changed. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}


/** Opcode 0xd9 0xf7.
 * FINCSTP: increment the FPU stack top pointer; mirror image of FDECSTP. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10250
10251
/** Opcode 0xd9 0xf8.
 * FPREM: partial remainder (truncating, pre-IEEE style) of ST(0)/ST(1). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9.
 * FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa.
 * FSQRT: ST(0) = sqrt(ST(0)). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb.
 * FSINCOS: ST(0) = sin(ST(0)), then push cos of the original value. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc.
 * FRNDINT: round ST(0) to integer according to the FCW rounding control. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd.
 * FSCALE: ST(0) = ST(0) * 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe.
 * FSIN: ST(0) = sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff.
 * FCOS: ST(0) = cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10314
10315
/** Used by iemOp_EscF1.
 * Dispatch table for the 0xd9 register-form second bytes 0xe0..0xff,
 * indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
10352
10353
10354/**
10355 * @opcode 0xd9
10356 */
10357FNIEMOP_DEF(iemOp_EscF1)
10358{
10359 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10360 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
10361
10362 if (IEM_IS_MODRM_REG_MODE(bRm))
10363 {
10364 switch (IEM_GET_MODRM_REG_8(bRm))
10365 {
10366 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
10367 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
10368 case 2:
10369 if (bRm == 0xd0)
10370 return FNIEMOP_CALL(iemOp_fnop);
10371 IEMOP_RAISE_INVALID_OPCODE_RET();
10372 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
10373 case 4:
10374 case 5:
10375 case 6:
10376 case 7:
10377 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
10378 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
10379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10380 }
10381 }
10382 else
10383 {
10384 switch (IEM_GET_MODRM_REG_8(bRm))
10385 {
10386 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
10387 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
10388 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
10389 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
10390 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
10391 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
10392 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
10393 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
10394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10395 }
10396 }
10397}
10398
10399
/** Opcode 0xda 11/0.
 * FCMOVB ST(0),ST(i): copy ST(i) into ST(0) if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both registers must be occupied; the move itself is conditional on EFLAGS.CF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/1.
 * FCMOVE ST(0),ST(i): copy ST(i) into ST(0) if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/2.
 * FCMOVBE ST(0),ST(i): copy ST(i) into ST(0) if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}


/** Opcode 0xda 11/3.
 * FCMOVU ST(0),ST(i): copy ST(i) into ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10502
10503
10504/**
10505 * Common worker for FPU instructions working on ST0 and ST1, only affecting
10506 * flags, and popping twice when done.
10507 *
10508 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10509 */
10510FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10511{
10512 IEM_MC_BEGIN(3, 1, 0, 0);
10513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10514 IEM_MC_LOCAL(uint16_t, u16Fsw);
10515 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10516 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10517 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10518
10519 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10520 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10521
10522 IEM_MC_PREPARE_FPU_USAGE();
10523 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
10524 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10525 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10526 } IEM_MC_ELSE() {
10527 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
10528 } IEM_MC_ENDIF();
10529 IEM_MC_ADVANCE_RIP_AND_FINISH();
10530
10531 IEM_MC_END();
10532}
10533
10534
/** Opcode 0xda 0xe9.
 * FUCOMPP - unordered compare of ST0 with ST1, then pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10541
10542
10543/**
10544 * Common worker for FPU instructions working on ST0 and an m32i, and storing
10545 * the result in ST0.
10546 *
10547 * @param bRm Mod R/M byte.
10548 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10549 */
10550FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
10551{
10552 IEM_MC_BEGIN(3, 3, 0, 0);
10553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10554 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10555 IEM_MC_LOCAL(int32_t, i32Val2);
10556 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10557 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10558 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
10559
10560 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10562
10563 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10564 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10565 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10566
10567 IEM_MC_PREPARE_FPU_USAGE();
10568 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10569 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
10570 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10571 } IEM_MC_ELSE() {
10572 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10573 } IEM_MC_ENDIF();
10574 IEM_MC_ADVANCE_RIP_AND_FINISH();
10575
10576 IEM_MC_END();
10577}
10578
10579
/** Opcode 0xda !11/0.
 * FIADD m32i - ST0 += (int32), via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10586
10587
/** Opcode 0xda !11/1.
 * FIMUL m32i - ST0 *= (int32), via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10594
10595
/** Opcode 0xda !11/2.
 * FICOM st0,m32i - compare ST0 against a 32-bit integer; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory-operand variant records FDP/FDS along with the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to mark on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10627
10628
/** Opcode 0xda !11/3.
 * FICOMP st0,m32i - like FICOM m32i but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same compare worker as FICOM; the pop is done in the FSW update. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10660
10661
/** Opcode 0xda !11/4.
 * FISUB m32i - ST0 -= (int32), via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10668
10669
/** Opcode 0xda !11/5.
 * FISUBR m32i - ST0 = (int32) - ST0, via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10676
10677
/** Opcode 0xda !11/6.
 * FIDIV m32i - ST0 /= (int32), via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10684
10685
/** Opcode 0xda !11/7.
 * FIDIVR m32i - ST0 = (int32) / ST0, via the common ST0/m32i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10692
10693
10694/**
10695 * @opcode 0xda
10696 */
10697FNIEMOP_DEF(iemOp_EscF2)
10698{
10699 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10700 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
10701 if (IEM_IS_MODRM_REG_MODE(bRm))
10702 {
10703 switch (IEM_GET_MODRM_REG_8(bRm))
10704 {
10705 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
10706 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
10707 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
10708 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
10709 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10710 case 5:
10711 if (bRm == 0xe9)
10712 return FNIEMOP_CALL(iemOp_fucompp);
10713 IEMOP_RAISE_INVALID_OPCODE_RET();
10714 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10715 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10716 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10717 }
10718 }
10719 else
10720 {
10721 switch (IEM_GET_MODRM_REG_8(bRm))
10722 {
10723 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
10724 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
10725 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
10726 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
10727 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
10728 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
10729 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
10730 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
10731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10732 }
10733 }
10734}
10735
10736
/** Opcode 0xdb !11/0.
 * FILD m32i - convert a 32-bit integer to R80 and push it onto the stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST7 before TOP is decremented; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10767
10768
/** Opcode 0xdb !11/1.
 * FISTTP m32i - store ST0 as int32 with truncation, then pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so address faults hit before FPU state changes. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if #IA is masked (FCW.IM), store the integer indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10802
10803
/** Opcode 0xdb !11/2.
 * FIST m32i - store ST0 as int32 (rounded per FCW.RC); stack unchanged. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* No pop here - that is the only difference vs. FISTP. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if #IA is masked (FCW.IM), store the integer indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10837
10838
/** Opcode 0xdb !11/3.
 * FISTP m32i - store ST0 as int32 (rounded per FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Same store worker as FIST; the pop happens in the FSW update. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if #IA is masked (FCW.IM), store the integer indefinite. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10872
10873
/** Opcode 0xdb !11/5.
 * FLD m80r - load an 80-bit real from memory and push it onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST7 before TOP is decremented; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10904
10905
/** Opcode 0xdb !11/7.
 * FSTP m80r - store ST0 as an 80-bit real to memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* 10-byte operand: explicit map with a 7-byte alignment mask (i.e. no #AC split checks beyond that). */
    IEM_MC_MEM_MAP_EX(pr80Dst, IEM_ACCESS_DATA_W, sizeof(*pr80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if #IA is masked (FCW.IM), store the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10939
10940
/** Opcode 0xdb 11/0.
 * FCMOVNB st0,stN - copy ST(i) to ST0 when CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10965
10966
/** Opcode 0xdb 11/1.
 * FCMOVNE st0,stN - copy ST(i) to ST0 when ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10991
10992
/** Opcode 0xdb 11/2.
 * FCMOVNBE st0,stN - copy ST(i) to ST0 when both CF and ZF are clear (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11017
11018
/** Opcode 0xdb 11/3.
 * Copy ST(i) to ST0 when PF is clear (not unordered).
 * NOTE(review): the Intel SDM mnemonic for DB /3 (reg) is FCMOVNU; the
 * 'fcmovnnu' spelling in the name/stats looks like a typo - confirm before
 * renaming, as the stats name is user visible. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be non-empty; otherwise signal stack underflow on ST0. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11043
11044
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 interrupt-enable; a no-op (after #NM check) on later CPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11055
11056
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 interrupt-disable; a no-op (after #NM check) on later CPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11067
11068
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception flags (no pending-exception check first). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* We modify FSW, so the FPU state must be brought in for changing. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11081
11082
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU without checking for pending exceptions;
 * deferred to the C implementation shared with FINIT. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11090
11091
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 set-protected-mode; a no-op (after #NM check) on later CPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11102
11103
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL return-to-real-mode; raises \#UD here since newer CPUs
 * do not implement it (the ignored-no-op variant is kept disabled below). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11119
11120
11121/** Opcode 0xdb 11/5. */
11122FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
11123{
11124 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
11125 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11126 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
11127 0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11128}
11129
11130
/** Opcode 0xdb 11/6.
 * FCOMI st0,stN - ordered compare of ST0 with ST(i), reporting the result
 * in EFLAGS; deferred to the C implementation shared with FUCOMI.
 * The FPU opcode is packed into the low bits of the last argument, with the
 * pop flag above it. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11139
11140
11141/**
11142 * @opcode 0xdb
11143 */
11144FNIEMOP_DEF(iemOp_EscF3)
11145{
11146 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11147 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
11148 if (IEM_IS_MODRM_REG_MODE(bRm))
11149 {
11150 switch (IEM_GET_MODRM_REG_8(bRm))
11151 {
11152 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
11153 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
11154 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
11155 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
11156 case 4:
11157 switch (bRm)
11158 {
11159 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
11160 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
11161 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
11162 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
11163 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
11164 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
11165 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
11166 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
11167 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11168 }
11169 break;
11170 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
11171 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
11172 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11174 }
11175 }
11176 else
11177 {
11178 switch (IEM_GET_MODRM_REG_8(bRm))
11179 {
11180 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
11181 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
11182 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
11183 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
11184 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11185 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
11186 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11187 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
11188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11189 }
11190 }
11191}
11192
11193
11194/**
11195 * Common worker for FPU instructions working on STn and ST0, and storing the
11196 * result in STn unless IE, DE or ZE was raised.
11197 *
11198 * @param bRm Mod R/M byte.
11199 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11200 */
11201FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11202{
11203 IEM_MC_BEGIN(3, 1, 0, 0);
11204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11205 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11206 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11207 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11208 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11209
11210 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11211 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11212
11213 IEM_MC_PREPARE_FPU_USAGE();
11214 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11215 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11216 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11217 } IEM_MC_ELSE() {
11218 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11219 } IEM_MC_ENDIF();
11220 IEM_MC_ADVANCE_RIP_AND_FINISH();
11221
11222 IEM_MC_END();
11223}
11224
11225
/** Opcode 0xdc 11/0.
 * FADD stN,st0 - ST(i) += ST0, via the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11232
11233
/** Opcode 0xdc 11/1.
 * FMUL stN,st0 - ST(i) *= ST0, via the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11240
11241
/** Opcode 0xdc 11/4.
 * FSUBR stN,st0 - ST(i) = ST0 - ST(i), via the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11248
11249
/** Opcode 0xdc 11/5.
 * FSUB stN,st0 - ST(i) -= ST0, via the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11256
11257
/** Opcode 0xdc 11/6.
 * FDIVR stN,st0 - ST(i) = ST0 / ST(i), via the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11264
11265
/** Opcode 0xdc 11/7.
 * FDIV stN,st0 - ST(i) /= ST0, via the common STn/ST0 worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11272
11273
11274/**
11275 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
11276 * memory operand, and storing the result in ST0.
11277 *
11278 * @param bRm Mod R/M byte.
11279 * @param pfnImpl Pointer to the instruction implementation (assembly).
11280 */
11281FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
11282{
11283 IEM_MC_BEGIN(3, 3, 0, 0);
11284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11285 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11286 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
11287 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11288 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
11289 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
11290
11291 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11292 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11293 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11294 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11295
11296 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11297 IEM_MC_PREPARE_FPU_USAGE();
11298 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
11299 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
11300 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11301 } IEM_MC_ELSE() {
11302 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11303 } IEM_MC_ENDIF();
11304 IEM_MC_ADVANCE_RIP_AND_FINISH();
11305
11306 IEM_MC_END();
11307}
11308
11309
/** Opcode 0xdc !11/0.
 * FADD m64r - ST0 += (double), via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11316
11317
/** Opcode 0xdc !11/1.
 * FMUL m64r - ST0 *= (double), via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11324
11325
/** Opcode 0xdc !11/2.
 * FCOM st0,m64r - compare ST0 against a 64-bit real; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no destination register to mark on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11357
11358
/** Opcode 0xdc !11/3.
 * FCOMP st0,m64r - like FCOM m64r but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same compare worker as FCOM; the pop is done in the FSW update. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11390
11391
/** Opcode 0xdc !11/4.
 * FSUB m64r - ST0 -= (double), via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
11398
11399
/** Opcode 0xdc !11/5.
 * FSUBR m64r - ST0 = (double) - ST0, via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
11406
11407
/** Opcode 0xdc !11/6.
 * FDIV m64r - ST0 /= (double), via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
11414
11415
/** Opcode 0xdc !11/7.
 * FDIVR m64r - ST0 = (double) / ST0, via the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11422
11423
/**
 * @opcode 0xdc
 *
 * Escape opcode 0xdc dispatcher: register forms operate on ST(i) with ST(0)
 * as the source, memory forms operate on ST(0) with a 64-bit real operand.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) - low 3 bits of the escape byte + modrm. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register forms: ST(i) <- ST(i) op ST(0). */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms: ST(0) <- ST(0) op m64real. */
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11462
11463
/** Opcode 0xdd !11/0.
 * FLD m64real - push the 64-bit real memory operand onto the FPU stack,
 * converting it to the 80-bit internal format.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that will become the new top (ST(7)
       relative to the current top) to be empty; otherwise stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11494
11495
/** Opcode 0xdd !11/1.
 * FISTTP m64int - store ST(0) to memory as a 64-bit integer using
 * truncation (chop) rounding regardless of RC, then pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before doing FPU work so memory faults are raised first. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11529
11530
/** Opcode 0xdd !11/2.
 * FST m64real - store ST(0) to memory as a 64-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before doing FPU work so memory faults are raised first. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11564
11565
11566
11567
/** Opcode 0xdd !11/3.
 * FSTP m64real - store ST(0) to memory as a 64-bit real, then pop.
 * Identical to iemOp_fst_m64r except for the popping FSW/underflow updates. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before doing FPU work so memory faults are raised first. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the real indefinite (negative QNaN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11601
11602
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the complete FPU state from memory;
 * complex enough that it is deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    /* The memory image layout depends on the effective operand size (94 vs 108 bytes). */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11620
11621
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the complete FPU state to memory and then
 * reinitialize the FPU (implicit FNINIT); deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    /* The memory image layout depends on the effective operand size (94 vs 108 bytes). */
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11639
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to memory (no pending-exception check). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    /* Read-only FPU state access: fetch FSW and write it to the destination. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11663
11664
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the given FPU register as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* Update FOP/FIP even though no data is stored. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11684
11685
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap ST(0) in a result with a zero FSW and store it to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11708
11709
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i) - unordered compare, no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    /* Defer to the common no-store compare worker with the fucom assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11716
11717
/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i) - unordered compare, no store, pops once. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    /* Defer to the popping no-store compare worker with the fucom assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11724
11725
/**
 * @opcode 0xdd
 *
 * Escape opcode 0xdd dispatcher: register forms are FFREE/FST/FSTP/FUCOM*,
 * memory forms are 64-bit real loads/stores plus FRSTOR/FNSAVE/FNSTSW.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) - low 3 bits of the escape byte + modrm. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11764
11765
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    /* Defer to the common ST(i)-op-ST(0)-and-pop worker with the fadd assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11772
11773
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    /* Defer to the common ST(i)-op-ST(0)-and-pop worker with the fmul assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11780
11781
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    /* Defer to the double-pop no-store compare worker with the fcom assembly helper. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11788
11789
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    /* Defer to the common ST(i)-op-ST(0)-and-pop worker with the fsubr assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11796
11797
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    /* Defer to the common ST(i)-op-ST(0)-and-pop worker with the fsub assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11804
11805
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    /* Defer to the common ST(i)-op-ST(0)-and-pop worker with the fdivr assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11812
11813
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide, store in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    /* Defer to the common ST(i)-op-ST(0)-and-pop worker with the fdiv assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11820
11821
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Fetches the 16-bit signed integer, applies the given binary FPU helper as
 * ST(0) = pfnAImpl(ST(0), m16i), and handles stack underflow when ST(0) is
 * empty.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11857
11858
/** Opcode 0xde !11/0.
 * FIADD m16int - add a 16-bit signed integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    /* Defer to the common ST(0)-op-m16i worker with the fiadd assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11865
11866
/** Opcode 0xde !11/1.
 * FIMUL m16int - multiply ST(0) by a 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    /* Defer to the common ST(0)-op-m16i worker with the fimul assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11873
11874
/** Opcode 0xde !11/2.
 * FICOM m16int - compare ST(0) with a 16-bit signed integer memory operand
 * (sets C0/C2/C3 via FSW, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11906
11907
/** Opcode 0xde !11/3.
 * FICOMP m16int - like FICOM m16int but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11939
11940
/** Opcode 0xde !11/4.
 * FISUB m16int - subtract a 16-bit signed integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    /* Defer to the common ST(0)-op-m16i worker with the fisub assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11947
11948
/** Opcode 0xde !11/5.
 * FISUBR m16int - reverse subtract: ST(0) = m16int - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    /* Defer to the common ST(0)-op-m16i worker with the fisubr assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11955
11956
/** Opcode 0xde !11/6.
 * FIDIV m16int - divide ST(0) by a 16-bit signed integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* Defer to the common ST(0)-op-m16i worker with the fidiv assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11963
11964
/** Opcode 0xde !11/7.
 * FIDIVR m16int - reverse divide: ST(0) = m16int / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* Defer to the common ST(0)-op-m16i worker with the fidivr assembly helper. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11971
11972
/**
 * @opcode 0xde
 *
 * Escape opcode 0xde dispatcher: register forms are the popping arithmetic
 * instructions (FADDP etc.) plus FCOMPP; memory forms operate on ST(0) with
 * a 16-bit signed integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) - low 3 bits of the escape byte + modrm. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            /* Only 0xde 0xd9 is valid in /3; everything else is #UD. */
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12013
12014
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: mark the
 * register empty, then increment the stack top (i.e. pop without storing). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    /* The extra top increment is what distinguishes FFREEP from FFREE. */
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12034
12035
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in AX (no pending-exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Read-only FPU state access: fetch FSW and store it in AX. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12050
12051
12052/** Opcode 0xdf 11/5. */
12053FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12054{
12055 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12056 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12057 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12058 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12059}
12060
12061
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i) - ordered compare setting EFLAGS (fUCmp=false, so QNaN
 * operands signal invalid operation), then pop (RT_BIT_32(31) requests it). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12070
12071
/** Opcode 0xdf !11/0.
 * FILD m16int - push a 16-bit signed integer memory operand onto the FPU
 * stack, converting it to the 80-bit real format. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) relative to the current top to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12102
12103
/** Opcode 0xdf !11/1.
 * FISTTP m16int - store ST(0) to memory as a 16-bit integer using
 * truncation (chop) rounding regardless of RC, then pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before doing FPU work so memory faults are raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12137
12138
/** Opcode 0xdf !11/2.
 * FIST m16int - store ST(0) to memory as a 16-bit integer using the rounding
 * mode from FCW.RC (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before doing FPU work so memory faults are raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12172
12173
/** Opcode 0xdf !11/3.
 * FISTP m16int - like FIST m16int but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination before doing FPU work so memory faults are raised first. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12207
12208
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - push an 80-bit packed BCD memory operand onto the FPU stack,
 * converting it to the 80-bit real format. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) relative to the current top to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12239
12240
/** Opcode 0xdf !11/5.
 * FILD m64int - push a 64-bit signed integer memory operand onto the FPU
 * stack, converting it to the 80-bit real format. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) relative to the current top to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12271
12272
12273/** Opcode 0xdf !11/6. */
12274FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
12275{
12276 IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
12277 IEM_MC_BEGIN(3, 2, 0, 0);
12278 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
12279 IEM_MC_LOCAL(uint16_t, u16Fsw);
12280 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
12281 IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
12282 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
12283
12284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
12285 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12286 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12287 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12288
12289 IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
12290 IEM_MC_PREPARE_FPU_USAGE();
12291 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
12292 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
12293 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
12294 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12295 } IEM_MC_ELSE() {
12296 IEM_MC_IF_FCW_IM() {
12297 IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
12298 IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
12299 } IEM_MC_ENDIF();
12300 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
12301 } IEM_MC_ENDIF();
12302 IEM_MC_ADVANCE_RIP_AND_FINISH();
12303
12304 IEM_MC_END();
12305}
12306
12307
/** Opcode 0xdf !11/7.
 *
 * FISTP m64int: convert ST(0) to a signed 64-bit integer, store it to memory
 * and pop the FPU register stack.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing first so store faults precede FPU state changes. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Normal path: convert, commit, update FSW with operand info and pop. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked, store the integer indefinite (INT64_MIN). */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12341
12342
12343/**
12344 * @opcode 0xdf
12345 */
12346FNIEMOP_DEF(iemOp_EscF7)
12347{
12348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12349 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
12350 if (IEM_IS_MODRM_REG_MODE(bRm))
12351 {
12352 switch (IEM_GET_MODRM_REG_8(bRm))
12353 {
12354 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
12355 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
12356 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12357 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12358 case 4: if (bRm == 0xe0)
12359 return FNIEMOP_CALL(iemOp_fnstsw_ax);
12360 IEMOP_RAISE_INVALID_OPCODE_RET();
12361 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
12362 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
12363 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12365 }
12366 }
12367 else
12368 {
12369 switch (IEM_GET_MODRM_REG_8(bRm))
12370 {
12371 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
12372 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
12373 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
12374 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
12375 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
12376 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
12377 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
12378 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
12379 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12380 }
12381 }
12382}
12383
12384
12385/**
12386 * @opcode 0xe0
12387 */
12388FNIEMOP_DEF(iemOp_loopne_Jb)
12389{
12390 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
12391 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12392 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12393
12394 switch (pVCpu->iem.s.enmEffAddrMode)
12395 {
12396 case IEMMODE_16BIT:
12397 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12398 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12399 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12400 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12401 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12402 } IEM_MC_ELSE() {
12403 IEM_MC_ADVANCE_RIP_AND_FINISH();
12404 } IEM_MC_ENDIF();
12405 IEM_MC_END();
12406 break;
12407
12408 case IEMMODE_32BIT:
12409 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12410 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12411 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12412 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12413 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12414 } IEM_MC_ELSE() {
12415 IEM_MC_ADVANCE_RIP_AND_FINISH();
12416 } IEM_MC_ENDIF();
12417 IEM_MC_END();
12418 break;
12419
12420 case IEMMODE_64BIT:
12421 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12423 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12424 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12425 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12426 } IEM_MC_ELSE() {
12427 IEM_MC_ADVANCE_RIP_AND_FINISH();
12428 } IEM_MC_ENDIF();
12429 IEM_MC_END();
12430 break;
12431
12432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12433 }
12434}
12435
12436
12437/**
12438 * @opcode 0xe1
12439 */
12440FNIEMOP_DEF(iemOp_loope_Jb)
12441{
12442 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
12443 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12444 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12445
12446 switch (pVCpu->iem.s.enmEffAddrMode)
12447 {
12448 case IEMMODE_16BIT:
12449 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12451 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12452 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12453 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12454 } IEM_MC_ELSE() {
12455 IEM_MC_ADVANCE_RIP_AND_FINISH();
12456 } IEM_MC_ENDIF();
12457 IEM_MC_END();
12458 break;
12459
12460 case IEMMODE_32BIT:
12461 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12462 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12463 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12464 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12465 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12466 } IEM_MC_ELSE() {
12467 IEM_MC_ADVANCE_RIP_AND_FINISH();
12468 } IEM_MC_ENDIF();
12469 IEM_MC_END();
12470 break;
12471
12472 case IEMMODE_64BIT:
12473 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12475 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12476 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12477 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12478 } IEM_MC_ELSE() {
12479 IEM_MC_ADVANCE_RIP_AND_FINISH();
12480 } IEM_MC_ENDIF();
12481 IEM_MC_END();
12482 break;
12483
12484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12485 }
12486}
12487
12488
12489/**
12490 * @opcode 0xe2
12491 */
12492FNIEMOP_DEF(iemOp_loop_Jb)
12493{
12494 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
12495 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12496 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12497
12498 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
12499 * using the 32-bit operand size override. How can that be restarted? See
12500 * weird pseudo code in intel manual. */
12501
12502 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
12503 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
12504 * the loop causes guest crashes, but when logging it's nice to skip a few million
12505 * lines of useless output. */
12506#if defined(LOG_ENABLED)
12507 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
12508 switch (pVCpu->iem.s.enmEffAddrMode)
12509 {
12510 case IEMMODE_16BIT:
12511 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12512 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12513 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
12514 IEM_MC_ADVANCE_RIP_AND_FINISH();
12515 IEM_MC_END();
12516 break;
12517
12518 case IEMMODE_32BIT:
12519 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12521 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
12522 IEM_MC_ADVANCE_RIP_AND_FINISH();
12523 IEM_MC_END();
12524 break;
12525
12526 case IEMMODE_64BIT:
12527 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12529 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
12530 IEM_MC_ADVANCE_RIP_AND_FINISH();
12531 IEM_MC_END();
12532 break;
12533
12534 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12535 }
12536#endif
12537
12538 switch (pVCpu->iem.s.enmEffAddrMode)
12539 {
12540 case IEMMODE_16BIT:
12541 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12542 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12543 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12544 IEM_MC_IF_CX_IS_NZ() {
12545 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12546 } IEM_MC_ELSE() {
12547 IEM_MC_ADVANCE_RIP_AND_FINISH();
12548 } IEM_MC_ENDIF();
12549 IEM_MC_END();
12550 break;
12551
12552 case IEMMODE_32BIT:
12553 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12555 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12556 IEM_MC_IF_ECX_IS_NZ() {
12557 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12558 } IEM_MC_ELSE() {
12559 IEM_MC_ADVANCE_RIP_AND_FINISH();
12560 } IEM_MC_ENDIF();
12561 IEM_MC_END();
12562 break;
12563
12564 case IEMMODE_64BIT:
12565 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12567 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12568 IEM_MC_IF_RCX_IS_NZ() {
12569 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12570 } IEM_MC_ELSE() {
12571 IEM_MC_ADVANCE_RIP_AND_FINISH();
12572 } IEM_MC_ENDIF();
12573 IEM_MC_END();
12574 break;
12575
12576 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12577 }
12578}
12579
12580
12581/**
12582 * @opcode 0xe3
12583 */
12584FNIEMOP_DEF(iemOp_jecxz_Jb)
12585{
12586 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
12587 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12588 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12589
12590 switch (pVCpu->iem.s.enmEffAddrMode)
12591 {
12592 case IEMMODE_16BIT:
12593 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12594 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12595 IEM_MC_IF_CX_IS_NZ() {
12596 IEM_MC_ADVANCE_RIP_AND_FINISH();
12597 } IEM_MC_ELSE() {
12598 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12599 } IEM_MC_ENDIF();
12600 IEM_MC_END();
12601 break;
12602
12603 case IEMMODE_32BIT:
12604 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12606 IEM_MC_IF_ECX_IS_NZ() {
12607 IEM_MC_ADVANCE_RIP_AND_FINISH();
12608 } IEM_MC_ELSE() {
12609 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12610 } IEM_MC_ENDIF();
12611 IEM_MC_END();
12612 break;
12613
12614 case IEMMODE_64BIT:
12615 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12617 IEM_MC_IF_RCX_IS_NZ() {
12618 IEM_MC_ADVANCE_RIP_AND_FINISH();
12619 } IEM_MC_ELSE() {
12620 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12621 } IEM_MC_ENDIF();
12622 IEM_MC_END();
12623 break;
12624
12625 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12626 }
12627}
12628
12629
/** Opcode 0xe4.
 * IN AL,Ib: read one byte from the immediate-specified I/O port into AL.
 * Deferred to the C implementation; marked as modifying xAX and as a
 * potential VM-exit / I/O operation.  The 0x80 flag tells the worker the
 * port came from an immediate. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12639
12640
/** Opcode 0xe5.
 * IN eAX,Ib: read a word or dword (per effective operand size) from the
 * immediate-specified I/O port into AX/EAX.  Deferred to the C
 * implementation; the 0x80 flag marks the immediate-port form. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12651
12652
/** Opcode 0xe6.
 * OUT Ib,AL: write AL to the immediate-specified I/O port.  Deferred to the
 * C implementation; no guest registers are modified. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12662
12663
/** Opcode 0xe7.
 * OUT Ib,eAX: write AX or EAX (per effective operand size) to the
 * immediate-specified I/O port.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12674
12675
12676/**
12677 * @opcode 0xe8
12678 */
12679FNIEMOP_DEF(iemOp_call_Jv)
12680{
12681 IEMOP_MNEMONIC(call_Jv, "call Jv");
12682 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12683 switch (pVCpu->iem.s.enmEffOpSize)
12684 {
12685 case IEMMODE_16BIT:
12686 {
12687 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12688 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12689 iemCImpl_call_rel_16, (int16_t)u16Imm);
12690 }
12691
12692 case IEMMODE_32BIT:
12693 {
12694 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12695 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12696 iemCImpl_call_rel_32, (int32_t)u32Imm);
12697 }
12698
12699 case IEMMODE_64BIT:
12700 {
12701 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12702 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12703 iemCImpl_call_rel_64, u64Imm);
12704 }
12705
12706 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12707 }
12708}
12709
12710
12711/**
12712 * @opcode 0xe9
12713 */
12714FNIEMOP_DEF(iemOp_jmp_Jv)
12715{
12716 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
12717 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12718 switch (pVCpu->iem.s.enmEffOpSize)
12719 {
12720 case IEMMODE_16BIT:
12721 IEM_MC_BEGIN(0, 0, 0, 0);
12722 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
12723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12724 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
12725 IEM_MC_END();
12726 break;
12727
12728 case IEMMODE_64BIT:
12729 case IEMMODE_32BIT:
12730 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12731 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
12732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12733 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
12734 IEM_MC_END();
12735 break;
12736
12737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12738 }
12739}
12740
12741
12742/**
12743 * @opcode 0xea
12744 */
12745FNIEMOP_DEF(iemOp_jmp_Ap)
12746{
12747 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
12748 IEMOP_HLP_NO_64BIT();
12749
12750 /* Decode the far pointer address and pass it on to the far call C implementation. */
12751 uint32_t off32Seg;
12752 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12753 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
12754 else
12755 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
12756 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
12757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12758 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
12759 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
12760 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
12761 /** @todo make task-switches, ring-switches, ++ return non-zero status */
12762}
12763
12764
12765/**
12766 * @opcode 0xeb
12767 */
12768FNIEMOP_DEF(iemOp_jmp_Jb)
12769{
12770 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
12771 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12772 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12773
12774 IEM_MC_BEGIN(0, 0, 0, 0);
12775 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12776 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12777 IEM_MC_END();
12778}
12779
12780
/** Opcode 0xec.
 * IN AL,DX: read one byte from the I/O port in DX into AL.  Deferred to the
 * C implementation; marked as modifying xAX. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12790
12791
/** Opcode 0xed.
 * IN eAX,DX: read a word or dword (per effective operand size) from the I/O
 * port in DX into AX/EAX.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12802
12803
/** Opcode 0xee.
 * OUT DX,AL: write AL to the I/O port in DX.  Deferred to the C
 * implementation; no guest registers are modified. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12812
12813
/** Opcode 0xef.
 * OUT DX,eAX: write AX or EAX (per effective operand size) to the I/O port
 * in DX.  Deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12823
12824
12825/**
12826 * @opcode 0xf0
12827 */
12828FNIEMOP_DEF(iemOp_lock)
12829{
12830 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
12831 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
12832 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
12833
12834 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12835 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12836}
12837
12838
12839/**
12840 * @opcode 0xf1
12841 */
12842FNIEMOP_DEF(iemOp_int1)
12843{
12844 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
12845 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
12846 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
12847 * LOADALL memo. Needs some testing. */
12848 IEMOP_HLP_MIN_386();
12849 /** @todo testcase! */
12850 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
12851 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
12852 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
12853}
12854
12855
12856/**
12857 * @opcode 0xf2
12858 */
12859FNIEMOP_DEF(iemOp_repne)
12860{
12861 /* This overrides any previous REPE prefix. */
12862 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
12863 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
12864 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
12865
12866 /* For the 4 entry opcode tables, REPNZ overrides any previous
12867 REPZ and operand size prefixes. */
12868 pVCpu->iem.s.idxPrefix = 3;
12869
12870 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12871 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12872}
12873
12874
12875/**
12876 * @opcode 0xf3
12877 */
12878FNIEMOP_DEF(iemOp_repe)
12879{
12880 /* This overrides any previous REPNE prefix. */
12881 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
12882 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
12883 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
12884
12885 /* For the 4 entry opcode tables, REPNZ overrides any previous
12886 REPNZ and operand size prefixes. */
12887 pVCpu->iem.s.idxPrefix = 2;
12888
12889 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12890 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12891}
12892
12893
12894/**
12895 * @opcode 0xf4
12896 */
12897FNIEMOP_DEF(iemOp_hlt)
12898{
12899 IEMOP_MNEMONIC(hlt, "hlt");
12900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12901 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
12902}
12903
12904
12905/**
12906 * @opcode 0xf5
12907 */
12908FNIEMOP_DEF(iemOp_cmc)
12909{
12910 IEMOP_MNEMONIC(cmc, "cmc");
12911 IEM_MC_BEGIN(0, 0, 0, 0);
12912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12913 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12914 IEM_MC_ADVANCE_RIP_AND_FINISH();
12915 IEM_MC_END();
12916}
12917
12918
12919/**
12920 * Body for of 'inc/dec/not/neg Eb'.
12921 */
12922#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
12923 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
12924 { \
12925 /* register access */ \
12926 IEM_MC_BEGIN(2, 0, 0, 0); \
12927 IEMOP_HLP_DONE_DECODING(); \
12928 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12929 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12930 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
12931 IEM_MC_REF_EFLAGS(pEFlags); \
12932 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12933 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12934 IEM_MC_END(); \
12935 } \
12936 else \
12937 { \
12938 /* memory access. */ \
12939 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
12940 { \
12941 IEM_MC_BEGIN(2, 2, 0, 0); \
12942 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12943 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12945 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12946 \
12947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12948 IEMOP_HLP_DONE_DECODING(); \
12949 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12950 IEM_MC_FETCH_EFLAGS(EFlags); \
12951 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12952 \
12953 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12954 IEM_MC_COMMIT_EFLAGS(EFlags); \
12955 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12956 IEM_MC_END(); \
12957 } \
12958 else \
12959 { \
12960 IEM_MC_BEGIN(2, 2, 0, 0); \
12961 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12962 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12964 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12965 \
12966 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12967 IEMOP_HLP_DONE_DECODING(); \
12968 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12969 IEM_MC_FETCH_EFLAGS(EFlags); \
12970 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
12971 \
12972 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu8Dst, bUnmapInfo); \
12973 IEM_MC_COMMIT_EFLAGS(EFlags); \
12974 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12975 IEM_MC_END(); \
12976 } \
12977 } \
12978 (void)0
12979
12980
12981/**
12982 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
12983 */
12984#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
12985 if (IEM_IS_MODRM_REG_MODE(bRm)) \
12986 { \
12987 /* \
12988 * Register target \
12989 */ \
12990 switch (pVCpu->iem.s.enmEffOpSize) \
12991 { \
12992 case IEMMODE_16BIT: \
12993 IEM_MC_BEGIN(2, 0, 0, 0); \
12994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
12995 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
12996 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12997 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
12998 IEM_MC_REF_EFLAGS(pEFlags); \
12999 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13000 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13001 IEM_MC_END(); \
13002 break; \
13003 \
13004 case IEMMODE_32BIT: \
13005 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13006 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13007 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13008 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13009 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13010 IEM_MC_REF_EFLAGS(pEFlags); \
13011 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13012 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13013 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13014 IEM_MC_END(); \
13015 break; \
13016 \
13017 case IEMMODE_64BIT: \
13018 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13020 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13021 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13022 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13023 IEM_MC_REF_EFLAGS(pEFlags); \
13024 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13025 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13026 IEM_MC_END(); \
13027 break; \
13028 \
13029 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13030 } \
13031 } \
13032 else \
13033 { \
13034 /* \
13035 * Memory target. \
13036 */ \
13037 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
13038 { \
13039 switch (pVCpu->iem.s.enmEffOpSize) \
13040 { \
13041 case IEMMODE_16BIT: \
13042 IEM_MC_BEGIN(2, 3, 0, 0); \
13043 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13044 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13045 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13046 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13047 \
13048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13050 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13051 IEM_MC_FETCH_EFLAGS(EFlags); \
13052 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13053 \
13054 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
13055 IEM_MC_COMMIT_EFLAGS(EFlags); \
13056 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13057 IEM_MC_END(); \
13058 break; \
13059 \
13060 case IEMMODE_32BIT: \
13061 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13062 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13063 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13064 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13065 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13066 \
13067 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13069 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13070 IEM_MC_FETCH_EFLAGS(EFlags); \
13071 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13072 \
13073 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
13074 IEM_MC_COMMIT_EFLAGS(EFlags); \
13075 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13076 IEM_MC_END(); \
13077 break; \
13078 \
13079 case IEMMODE_64BIT: \
13080 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13081 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13082 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13084 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13085 \
13086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13088 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13089 IEM_MC_FETCH_EFLAGS(EFlags); \
13090 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13091 \
13092 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
13093 IEM_MC_COMMIT_EFLAGS(EFlags); \
13094 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13095 IEM_MC_END(); \
13096 break; \
13097 \
13098 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13099 } \
13100 } \
13101 else \
13102 { \
13103 (void)0
13104
/**
 * Continuation of IEMOP_BODY_UNARY_Ev: emits the LOCK-prefixed memory forms
 * for all three operand sizes using the atomic workers, then closes the
 * 'else' branch and outer block left open by IEMOP_BODY_UNARY_Ev.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu16Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu32Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(pu64Dst, bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13170
13171
13172/**
13173 * @opmaps grp3_f6
13174 * @opcode /0
13175 * @todo also /1
13176 */
13177FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13178{
13179 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
13180 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13181
13182 if (IEM_IS_MODRM_REG_MODE(bRm))
13183 {
13184 /* register access */
13185 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13186 IEM_MC_BEGIN(3, 0, 0, 0);
13187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13188 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13189 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13190 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13191 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13192 IEM_MC_REF_EFLAGS(pEFlags);
13193 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13194 IEM_MC_ADVANCE_RIP_AND_FINISH();
13195 IEM_MC_END();
13196 }
13197 else
13198 {
13199 /* memory access. */
13200 IEM_MC_BEGIN(3, 3, 0, 0);
13201 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13202 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13203
13204 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13206
13207 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13208 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
13209 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13210
13211 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
13212 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13213 IEM_MC_FETCH_EFLAGS(EFlags);
13214 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13215
13216 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu8Dst, bUnmapInfo);
13217 IEM_MC_COMMIT_EFLAGS(EFlags);
13218 IEM_MC_ADVANCE_RIP_AND_FINISH();
13219 IEM_MC_END();
13220 }
13221}
13222
13223
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized MUL/IMUL/DIV/IDIV group-3 forms.  The
 * implicit operand is AX (result/accumulator); pfnU8 is the assembly worker
 * for the specific operation.  A non-zero worker return raises \#DE. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else is a divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; anything else is a divide error (#DE). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13274
13275
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword group 3 multiply/divide encodings
 * (MUL/IMUL/DIV/IDIV Ev).  The explicit operand comes from the ModR/M
 * register or memory location; [R|E]AX and [R|E]DX are passed by reference
 * as the implicit operands.  The worker is selected from @a pImpl by the
 * effective operand size.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 * @param   pImpl   Table of 16/32/64-bit mul/div assembly workers; each
 *                  returns zero on success and non-zero to raise \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* Non-zero worker status -> #DE. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* On success, 32-bit ops zero the high halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* On success, 32-bit ops zero the high halves of RAX/RDX. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13446
13447
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * NOT Eb: one's complement of the byte register/memory operand.  The shared
 * unary-Eb body handles register vs. memory forms and the LOCK prefix.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
13457
13458
13459/**
13460 * @opmaps grp3_f6
13461 * @opcode /3
13462 */
13463FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
13464{
13465 IEMOP_MNEMONIC(net_Eb, "neg Eb");
13466 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
13467}
13468
13469
13470/**
13471 * @opcode 0xf6
13472 */
13473FNIEMOP_DEF(iemOp_Grp3_Eb)
13474{
13475 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13476 switch (IEM_GET_MODRM_REG_8(bRm))
13477 {
13478 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
13479 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
13480 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
13481 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
13482 case 4:
13483 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
13484 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13485 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
13486 case 5:
13487 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
13488 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13489 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
13490 case 6:
13491 IEMOP_MNEMONIC(div_Eb, "div Eb");
13492 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13493 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
13494 case 7:
13495 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
13496 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13497 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
13498 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13499 }
13500}
13501
13502
/** Opcode 0xf7 /0.
 *
 * TEST Ev,Iv: ANDs the register/memory operand with an immediate, updating
 * only EFLAGS (no result write-back).  In 64-bit mode the immediate is a
 * sign-extended 32-bit value.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                /* Immediate is imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Last argument: number of immediate bytes following the ModR/M bytes (2). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Mapped read-only: test never writes the destination back. */
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu16Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu32Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still 4 immediate bytes: imm32 sign-extended to 64 bits. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(pu64Dst, bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13638
13639
/** Opcode 0xf7 /2.
 *
 * NOT Ev: one's complement of the word/dword/qword operand.  The shared
 * unary-Ev bodies emit both the plain and the LOCK-prefixed variants.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13647
13648
/** Opcode 0xf7 /3.
 *
 * NEG Ev: two's complement negation of the word/dword/qword operand.  The
 * shared unary-Ev bodies emit both the plain and the LOCK-prefixed variants.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13656
13657
13658/**
13659 * @opcode 0xf7
13660 */
13661FNIEMOP_DEF(iemOp_Grp3_Ev)
13662{
13663 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13664 switch (IEM_GET_MODRM_REG_8(bRm))
13665 {
13666 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13667 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
13668 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
13669 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
13670 case 4:
13671 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
13672 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13673 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
13674 case 5:
13675 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
13676 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
13677 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
13678 case 6:
13679 IEMOP_MNEMONIC(div_Ev, "div Ev");
13680 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13681 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
13682 case 7:
13683 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
13684 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
13685 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
13686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13687 }
13688}
13689
13690
/**
 * @opcode 0xf8
 *
 * CLC: clears EFLAGS.CF, leaving all other flags untouched.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13703
13704
/**
 * @opcode 0xf9
 *
 * STC: sets EFLAGS.CF, leaving all other flags untouched.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13717
13718
/**
 * @opcode 0xfa
 *
 * CLI: deferred entirely to the C implementation since it touches RFLAGS.IF,
 * may cause a VM-exit, and requires an IRQ re-check before continuing.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
13728
13729
/**
 * @opcode 0xfb
 *
 * STI: deferred to the C implementation; modifies RFLAGS, sets up the
 * interrupt inhibit shadow for the next instruction, re-checks IRQs
 * afterwards and may cause a VM-exit.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
13737
13738
/**
 * @opcode 0xfc
 *
 * CLD: clears EFLAGS.DF, leaving all other flags untouched.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13751
13752
/**
 * @opcode 0xfd
 *
 * STD: sets EFLAGS.DF, leaving all other flags untouched.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13765
13766
/**
 * @opmaps grp4
 * @opcode /0
 *
 * INC Eb: byte increment via the shared unary-Eb body (handles register,
 * memory and LOCK forms).
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13776
13777
/**
 * @opmaps grp4
 * @opcode /1
 *
 * DEC Eb: byte decrement via the shared unary-Eb body (handles register,
 * memory and LOCK forms).
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13787
13788
13789/**
13790 * @opcode 0xfe
13791 */
13792FNIEMOP_DEF(iemOp_Grp4)
13793{
13794 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13795 switch (IEM_GET_MODRM_REG_8(bRm))
13796 {
13797 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
13798 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
13799 default:
13800 /** @todo is the eff-addr decoded? */
13801 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
13802 IEMOP_RAISE_INVALID_OPCODE_RET();
13803 }
13804}
13805
/** Opcode 0xff /0.
 *
 * INC Ev: word/dword/qword increment via the shared unary-Ev bodies, which
 * emit both the plain and the LOCK-prefixed variants.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13813
13814
/** Opcode 0xff /1.
 *
 * DEC Ev: word/dword/qword decrement via the shared unary-Ev bodies, which
 * emit both the plain and the LOCK-prefixed variants.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13822
13823
/**
 * Opcode 0xff /2.
 *
 * CALL Ev (near, indirect): the target RIP is read from a register or a
 * memory location and handed to the operand-size specific C implementation,
 * which also pushes the return address.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13910
/**
 * Body for the grp5 far indirect branches, 0xff /3 (callf Ep) and 0xff /5
 * (jmpf Ep): loads a far pointer (offset followed by a 16-bit selector)
 * from memory and hands it to the given C implementation.
 *
 * Register forms are invalid and raise \#UD.  In 64-bit mode the default
 * operand size stays 32-bit; a REX.W prefix is only honoured on Intel CPUs.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       The far-branch C implementation (callf/FarJmp).
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags for the call
 *                          (e.g. IEM_CIMPL_F_BRANCH_STACK for callf).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13979
13980
/**
 * Opcode 0xff /3.
 *
 * CALLF Ep (far, indirect): far pointer loaded from memory; register forms
 * raise \#UD (handled in the shared body macro).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
13990
13991
/**
 * Opcode 0xff /4.
 *
 * JMP Ev (near, indirect): the target RIP is read from a register or a
 * memory location and installed directly via the SET_RIP micro-ops (no
 * stack activity, unlike /2).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14078
14079
/**
 * Opcode 0xff /5.
 *
 * JMPF Ep (far, indirect): far pointer loaded from memory; register forms
 * raise \#UD (handled in the shared body macro).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
14089
14090
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev: register forms are delegated to the common push-GReg worker;
 * memory forms load the value and push it here.  Note: 32-bit push is not
 * encodable in 64-bit mode (IEM_MC_F_NOT_64BIT).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14146
14147
14148/**
14149 * @opcode 0xff
14150 */
14151FNIEMOP_DEF(iemOp_Grp5)
14152{
14153 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14154 switch (IEM_GET_MODRM_REG_8(bRm))
14155 {
14156 case 0:
14157 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14158 case 1:
14159 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14160 case 2:
14161 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14162 case 3:
14163 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14164 case 4:
14165 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14166 case 5:
14167 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14168 case 6:
14169 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14170 case 7:
14171 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
14172 IEMOP_RAISE_INVALID_OPCODE_RET();
14173 }
14174 AssertFailedReturn(VERR_IEM_IPE_3);
14175}
14176
14177
14178
/**
 * The one-byte opcode dispatch table.
 *
 * Indexed directly by the opcode byte; each entry is the decoder/worker for
 * that opcode.  Entries named iemOp_GrpN dispatch further on the ModR/M reg
 * field, iemOp_2byteEscape (0x0f) continues decoding in the two-byte map, and
 * iemOp_EscF0..EscF7 (0xd8..0xdf) handle the x87 FPU escape opcodes.  Some
 * handlers double up for prefix-dependent forms (e.g. the VEX/EVEX/XOP escape
 * aliases at 0x62, 0x8f, 0xc4 and 0xc5).
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX, /* REX prefixes in 64-bit mode */
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb, /* Jcc short jumps */
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib, /* mov reg8,Ib */
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv, /* mov reg,Iv */
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3, /* x87 FPU escapes */
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe, /* prefix bytes */
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
14246
14247
14248/** @} */
14249
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette