VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 102572

Last change on this file since 102572 was 102572, checked in by vboxsync, 12 months ago

VMM/IEM: IEM_MC_POP_Uxx -> IEM_MC_POP_GREG_Uxx, popping by register number instead of reference (pointer). bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 532.0 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 102572 2023-12-11 15:20:48Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
59/**
60 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
61 * memory/register as the destination.
62 *
63 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * Note: This macro deliberately expands to an INCOMPLETE statement: the memory
 *       branch ends in an open 'else {' that the _NO_LOCK/_LOCKED companion
 *       macro must close.  The register form is handled entirely here; the
 *       unlocked memory form maps the destination byte read/write, calls
 *       a_fnNormalU8 and only commits memory and EFLAGS afterwards (EFLAGS is
 *       kept in a stack local until the memory commit succeeds).
64 */
65#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
66 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
67 \
68 /* \
69 * If rm is denoting a register, no more instruction bytes. \
70 */ \
71 if (IEM_IS_MODRM_REG_MODE(bRm)) \
72 { \
73 IEM_MC_BEGIN(3, 0, 0, 0); \
74 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
75 IEM_MC_ARG(uint8_t, u8Src, 1); \
76 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
77 \
78 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
79 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
80 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
81 IEM_MC_REF_EFLAGS(pEFlags); \
82 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
83 \
84 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
85 IEM_MC_END(); \
86 } \
87 else \
88 { \
89 /* \
90 * We're accessing memory. \
91 * Note! We're putting the eflags on the stack here so we can commit them \
92 * after the memory. \
93 */ \
94 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
95 { \
96 IEM_MC_BEGIN(3, 3, 0, 0); \
97 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
98 IEM_MC_ARG(uint8_t, u8Src, 1); \
99 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
100 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
101 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
102 \
103 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
104 IEMOP_HLP_DONE_DECODING(); \
105 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
106 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
107 IEM_MC_FETCH_EFLAGS(EFlags); \
108 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
109 \
110 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
111 IEM_MC_COMMIT_EFLAGS(EFlags); \
112 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
113 IEM_MC_END(); \
114 } \
115 else \
116 { \
117 (void)0
118
119/**
120 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
121 * operands.
122 *
123 * Used with IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 *
 * Note: Same open-ended expansion as IEMOP_BODY_BINARY_rm_r8_RW, but the
 *       memory operand is mapped read-only (const pointer, _RO map/unmap):
 *       only EFLAGS is modified, never the r/m destination.
124 */
125#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
126 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
127 \
128 /* \
129 * If rm is denoting a register, no more instruction bytes. \
130 */ \
131 if (IEM_IS_MODRM_REG_MODE(bRm)) \
132 { \
133 IEM_MC_BEGIN(3, 0, 0, 0); \
134 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
135 IEM_MC_ARG(uint8_t, u8Src, 1); \
136 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
137 \
138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
139 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
140 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
141 IEM_MC_REF_EFLAGS(pEFlags); \
142 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
143 \
144 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
145 IEM_MC_END(); \
146 } \
147 else \
148 { \
149 /* \
150 * We're accessing memory. \
151 * Note! We're putting the eflags on the stack here so we can commit them \
152 * after the memory. \
153 */ \
154 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
155 { \
156 IEM_MC_BEGIN(3, 3, 0, 0); \
157 IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
158 IEM_MC_ARG(uint8_t, u8Src, 1); \
159 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
160 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
161 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
162 \
163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
164 IEMOP_HLP_DONE_DECODING(); \
165 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
166 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
167 IEM_MC_FETCH_EFLAGS(EFlags); \
168 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
169 \
170 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
171 IEM_MC_COMMIT_EFLAGS(EFlags); \
172 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
173 IEM_MC_END(); \
174 } \
175 else \
176 { \
177 (void)0
178
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW/RO when the instruction does not allow
 * the LOCK prefix: raises the invalid-lock-prefix exception and closes the
 * two brace levels the head macro left open.
 */
179#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
180 IEMOP_HLP_DONE_DECODING(); \
181 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
182 } \
183 } \
184 (void)0
185
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW when the instruction allows the LOCK
 * prefix: emits the LOCK-prefixed memory path using a_fnLockedU8 and closes
 * the braces the head macro left open.
 *
 * NOTE(review): the unmap-info local is named bMapInfoDst here, while every
 * sibling macro calls it bUnmapInfo — cosmetic inconsistency only; confirm
 * before renaming since IEMAllInstPython.py parses these bodies.
 */
186#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
187 IEM_MC_BEGIN(3, 3, 0, 0); \
188 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
189 IEM_MC_ARG(uint8_t, u8Src, 1); \
190 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
192 IEM_MC_LOCAL(uint8_t, bMapInfoDst); \
193 \
194 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
195 IEMOP_HLP_DONE_DECODING(); \
196 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
197 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
198 IEM_MC_FETCH_EFLAGS(EFlags); \
199 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
200 \
201 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bMapInfoDst); \
202 IEM_MC_COMMIT_EFLAGS(EFlags); \
203 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
204 IEM_MC_END(); \
205 } \
206 } \
207 (void)0
208
209/**
210 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
211 * destination.
 *
 * Unlike the rm_r8 macros this one is self-contained (balanced braces) and
 * needs no _LOCKED/_NO_LOCK tail: the destination is always a register, so
 * the LOCK prefix is rejected via IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX.
212 */
213#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
214 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
215 \
216 /* \
217 * If rm is denoting a register, no more instruction bytes. \
218 */ \
219 if (IEM_IS_MODRM_REG_MODE(bRm)) \
220 { \
221 IEM_MC_BEGIN(3, 0, 0, 0); \
222 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
223 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
224 IEM_MC_ARG(uint8_t, u8Src, 1); \
225 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
226 \
227 IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
228 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
229 IEM_MC_REF_EFLAGS(pEFlags); \
230 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
231 \
232 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
233 IEM_MC_END(); \
234 } \
235 else \
236 { \
237 /* \
238 * We're accessing memory. \
239 */ \
240 IEM_MC_BEGIN(3, 1, 0, 0); \
241 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
242 IEM_MC_ARG(uint8_t, u8Src, 1); \
243 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
244 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
245 \
246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
248 IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
249 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
250 IEM_MC_REF_EFLAGS(pEFlags); \
251 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
252 \
253 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
254 IEM_MC_END(); \
255 } \
256 (void)0
257
258
259/**
260 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
261 * memory/register as the destination.
 *
 * Must be closed by IEMOP_BODY_BINARY_rm_rv_LOCKED (the expansion ends in an
 * open 'else {' for the LOCK-prefixed memory path).  Dispatches on the
 * effective operand size; the 32-bit register case explicitly clears the
 * high dword of the destination (IEM_MC_CLEAR_HIGH_GREG_U64), per x86-64
 * 32-bit operand semantics.
262 */
263#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
264 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
265 \
266 /* \
267 * If rm is denoting a register, no more instruction bytes. \
268 */ \
269 if (IEM_IS_MODRM_REG_MODE(bRm)) \
270 { \
271 switch (pVCpu->iem.s.enmEffOpSize) \
272 { \
273 case IEMMODE_16BIT: \
274 IEM_MC_BEGIN(3, 0, 0, 0); \
275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
276 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
277 IEM_MC_ARG(uint16_t, u16Src, 1); \
278 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
279 \
280 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
281 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
282 IEM_MC_REF_EFLAGS(pEFlags); \
283 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
284 \
285 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
286 IEM_MC_END(); \
287 break; \
288 \
289 case IEMMODE_32BIT: \
290 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
292 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
293 IEM_MC_ARG(uint32_t, u32Src, 1); \
294 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
295 \
296 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
297 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
298 IEM_MC_REF_EFLAGS(pEFlags); \
299 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
300 \
301 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
302 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
303 IEM_MC_END(); \
304 break; \
305 \
306 case IEMMODE_64BIT: \
307 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
309 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
310 IEM_MC_ARG(uint64_t, u64Src, 1); \
311 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
312 \
313 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
314 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
315 IEM_MC_REF_EFLAGS(pEFlags); \
316 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
317 \
318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
319 IEM_MC_END(); \
320 break; \
321 \
322 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
323 } \
324 } \
325 else \
326 { \
327 /* \
328 * We're accessing memory. \
329 * Note! We're putting the eflags on the stack here so we can commit them \
330 * after the memory. \
331 */ \
332 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
333 { \
334 switch (pVCpu->iem.s.enmEffOpSize) \
335 { \
336 case IEMMODE_16BIT: \
337 IEM_MC_BEGIN(3, 3, 0, 0); \
338 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
339 IEM_MC_ARG(uint16_t, u16Src, 1); \
340 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
342 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
343 \
344 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
345 IEMOP_HLP_DONE_DECODING(); \
346 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
347 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
348 IEM_MC_FETCH_EFLAGS(EFlags); \
349 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
350 \
351 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
352 IEM_MC_COMMIT_EFLAGS(EFlags); \
353 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
354 IEM_MC_END(); \
355 break; \
356 \
357 case IEMMODE_32BIT: \
358 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
359 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
360 IEM_MC_ARG(uint32_t, u32Src, 1); \
361 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
362 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
363 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
364 \
365 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
366 IEMOP_HLP_DONE_DECODING(); \
367 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
368 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
369 IEM_MC_FETCH_EFLAGS(EFlags); \
370 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
371 \
372 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
373 IEM_MC_COMMIT_EFLAGS(EFlags); \
374 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
375 IEM_MC_END(); \
376 break; \
377 \
378 case IEMMODE_64BIT: \
379 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
380 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
381 IEM_MC_ARG(uint64_t, u64Src, 1); \
382 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
384 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
385 \
386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
387 IEMOP_HLP_DONE_DECODING(); \
388 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
389 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
390 IEM_MC_FETCH_EFLAGS(EFlags); \
391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
392 \
393 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
394 IEM_MC_COMMIT_EFLAGS(EFlags); \
395 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
396 IEM_MC_END(); \
397 break; \
398 \
399 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
400 } \
401 } \
402 else \
403 { \
404 (void)0
405/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv_RW: emits the LOCK-prefixed memory code
 * paths using the a_fnLockedUxx workers and closes the three brace levels
 * the head macro left open.
 */
406#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
407 switch (pVCpu->iem.s.enmEffOpSize) \
408 { \
409 case IEMMODE_16BIT: \
410 IEM_MC_BEGIN(3, 3, 0, 0); \
411 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
412 IEM_MC_ARG(uint16_t, u16Src, 1); \
413 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
414 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
415 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
416 \
417 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
418 IEMOP_HLP_DONE_DECODING(); \
419 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
420 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
421 IEM_MC_FETCH_EFLAGS(EFlags); \
422 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
423 \
424 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
425 IEM_MC_COMMIT_EFLAGS(EFlags); \
426 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
427 IEM_MC_END(); \
428 break; \
429 \
430 case IEMMODE_32BIT: \
431 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
432 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
433 IEM_MC_ARG(uint32_t, u32Src, 1); \
434 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
436 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
437 \
438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
439 IEMOP_HLP_DONE_DECODING(); \
440 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
441 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
442 IEM_MC_FETCH_EFLAGS(EFlags); \
443 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
444 \
445 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
446 IEM_MC_COMMIT_EFLAGS(EFlags); \
447 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
448 IEM_MC_END(); \
449 break; \
450 \
451 case IEMMODE_64BIT: \
452 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
453 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
454 IEM_MC_ARG(uint64_t, u64Src, 1); \
455 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
456 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
457 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
458 \
459 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
460 IEMOP_HLP_DONE_DECODING(); \
461 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
462 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
463 IEM_MC_FETCH_EFLAGS(EFlags); \
464 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
465 \
466 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
467 IEM_MC_COMMIT_EFLAGS(EFlags); \
468 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
469 IEM_MC_END(); \
470 break; \
471 \
472 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
473 } \
474 } \
475 } \
476 (void)0
477
478/**
479 * Body for read-only word/dword/qword instructions like TEST and CMP with
480 * memory/register as the destination.
 *
 * Self-contained (unlike the _RW variant): the destination is never written,
 * so LOCK is always invalid and the lock-prefix branch is emitted inline.
 * Memory operands are mapped read-only; only EFLAGS is committed.
481 */
482#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
484 \
485 /* \
486 * If rm is denoting a register, no more instruction bytes. \
487 */ \
488 if (IEM_IS_MODRM_REG_MODE(bRm)) \
489 { \
490 switch (pVCpu->iem.s.enmEffOpSize) \
491 { \
492 case IEMMODE_16BIT: \
493 IEM_MC_BEGIN(3, 0, 0, 0); \
494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
495 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
496 IEM_MC_ARG(uint16_t, u16Src, 1); \
497 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
498 \
499 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
500 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
501 IEM_MC_REF_EFLAGS(pEFlags); \
502 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
503 \
504 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
505 IEM_MC_END(); \
506 break; \
507 \
508 case IEMMODE_32BIT: \
509 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
510 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
511 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
512 IEM_MC_ARG(uint32_t, u32Src, 1); \
513 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
514 \
515 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
516 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
517 IEM_MC_REF_EFLAGS(pEFlags); \
518 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
519 \
520 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
521 IEM_MC_END(); \
522 break; \
523 \
524 case IEMMODE_64BIT: \
525 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
527 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
528 IEM_MC_ARG(uint64_t, u64Src, 1); \
529 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
530 \
531 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
532 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
533 IEM_MC_REF_EFLAGS(pEFlags); \
534 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
535 \
536 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
537 IEM_MC_END(); \
538 break; \
539 \
540 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
541 } \
542 } \
543 else \
544 { \
545 /* \
546 * We're accessing memory. \
547 * Note! We're putting the eflags on the stack here so we can commit them \
548 * after the memory. \
549 */ \
550 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
551 { \
552 switch (pVCpu->iem.s.enmEffOpSize) \
553 { \
554 case IEMMODE_16BIT: \
555 IEM_MC_BEGIN(3, 3, 0, 0); \
556 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
557 IEM_MC_ARG(uint16_t, u16Src, 1); \
558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
560 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
561 \
562 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
563 IEMOP_HLP_DONE_DECODING(); \
564 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
565 IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
566 IEM_MC_FETCH_EFLAGS(EFlags); \
567 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
568 \
569 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
570 IEM_MC_COMMIT_EFLAGS(EFlags); \
571 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
572 IEM_MC_END(); \
573 break; \
574 \
575 case IEMMODE_32BIT: \
576 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
577 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
578 IEM_MC_ARG(uint32_t, u32Src, 1); \
579 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
580 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
581 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
582 \
583 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
584 IEMOP_HLP_DONE_DECODING(); \
585 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
586 IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
587 IEM_MC_FETCH_EFLAGS(EFlags); \
588 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
589 \
590 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
591 IEM_MC_COMMIT_EFLAGS(EFlags); \
592 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
593 IEM_MC_END(); \
594 break; \
595 \
596 case IEMMODE_64BIT: \
597 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
598 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
599 IEM_MC_ARG(uint64_t, u64Src, 1); \
600 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
602 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
603 \
604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
605 IEMOP_HLP_DONE_DECODING(); \
606 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
607 IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
608 IEM_MC_FETCH_EFLAGS(EFlags); \
609 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
610 \
611 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
612 IEM_MC_COMMIT_EFLAGS(EFlags); \
613 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
614 IEM_MC_END(); \
615 break; \
616 \
617 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
618 } \
619 } \
620 else \
621 { \
622 IEMOP_HLP_DONE_DECODING(); \
623 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
624 } \
625 } \
626 (void)0
627
628
629/**
630 * Body for instructions like ADD, AND, OR, ++ with working on AL with
631 * a byte immediate.
 *
 * No ModR/M byte: the destination is fixed to AL and the source is the
 * immediate operand, so there is no memory form and no LOCK handling.
 * Note the trailing IEM_MC_END() has no semicolon — the invoker supplies it.
632 */
633#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
634 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
635 \
636 IEM_MC_BEGIN(3, 0, 0, 0); \
637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
638 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
639 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/ u8Imm, 1); \
640 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
641 \
642 IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
643 IEM_MC_REF_EFLAGS(pEFlags); \
644 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
645 \
646 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
647 IEM_MC_END()
648
649/**
650 * Body for instructions like ADD, AND, OR, ++ with working on
651 * AX/EAX/RAX with a word/dword immediate.
 *
 * The 64-bit form sign-extends a 32-bit immediate (GET_NEXT_S32_SX_U64).
 * a_fModifiesDstReg is only consulted in the 32-bit case, where a modified
 * destination requires clearing the high dword of RAX; read-only users
 * (CMP/TEST) pass 0 to skip that.
 *
 * NOTE(review): the case blocks intentionally have no 'break;' — presumably
 * IEM_MC_ADVANCE_RIP_AND_FINISH()/IEM_MC_END() end with a return, as in the
 * sibling macros that do use break for uniformity; confirm before touching.
652 */
653#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
654 switch (pVCpu->iem.s.enmEffOpSize) \
655 { \
656 case IEMMODE_16BIT: \
657 { \
658 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
659 \
660 IEM_MC_BEGIN(3, 0, 0, 0); \
661 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
662 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
663 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1); \
664 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
665 \
666 IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
667 IEM_MC_REF_EFLAGS(pEFlags); \
668 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
669 \
670 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
671 IEM_MC_END(); \
672 } \
673 \
674 case IEMMODE_32BIT: \
675 { \
676 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
677 \
678 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
680 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
681 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1); \
682 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
683 \
684 IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
685 IEM_MC_REF_EFLAGS(pEFlags); \
686 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
687 \
688 if (a_fModifiesDstReg) \
689 IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
690 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
691 IEM_MC_END(); \
692 } \
693 \
694 case IEMMODE_64BIT: \
695 { \
696 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
697 \
698 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
699 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
700 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
701 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1); \
702 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
703 \
704 IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
705 IEM_MC_REF_EFLAGS(pEFlags); \
706 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
707 \
708 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
709 IEM_MC_END(); \
710 } \
711 \
712 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
713 } \
714 (void)0
715
716
717
718/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
736FNIEMOP_DEF(iemOp_add_Eb_Gb)
737{
738 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* ADD r/m8, r8 — head macro emits the regular paths, the _LOCKED tail
       emits the LOCK-prefixed memory path and closes the open braces. */
739 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_add_u8);
740 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
741}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
753FNIEMOP_DEF(iemOp_add_Ev_Gv)
754{
755 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* ADD r/m16/32/64, r16/32/64 — regular paths plus LOCKed memory tail. */
756 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
757 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
758}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
767FNIEMOP_DEF(iemOp_add_Gb_Eb)
768{
769 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* ADD r8, r/m8 — register destination, so no LOCK variant exists. */
770 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
771}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
780FNIEMOP_DEF(iemOp_add_Gv_Ev)
781{
782 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* ADD r16/32/64, r/m — IEMOP_BODY_BINARY_rv_rm is defined elsewhere;
       trailing arguments presumably modifies-dst / sign-extend flags — confirm
       against the macro definition. */
783 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
784}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
793FNIEMOP_DEF(iemOp_add_Al_Ib)
794{
795 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* ADD AL, imm8 — fixed AL destination; body macro supplies everything
       except the final semicolon. */
796 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
797}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
809FNIEMOP_DEF(iemOp_add_eAX_Iz)
810{
811 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* ADD rAX, imm16/32 — final '1' = modifies destination, so the 32-bit
       form clears the high dword of RAX. */
812 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
813}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
819 */
820FNIEMOP_DEF(iemOp_push_ES)
821{
822 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    /* Invalid in 64-bit mode (presumably #UD via IEMOP_HLP_NO_64BIT — confirm);
       otherwise shares the common segment-register push helper. */
823 IEMOP_HLP_NO_64BIT();
824 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
825}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
831 */
832FNIEMOP_DEF(iemOp_pop_ES)
833{
834 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
835 IEMOP_HLP_NO_64BIT();
836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defer to the C implementation; IEM_CIMPL_F_MODE flags a possible mode
       change, and the bitmask lists the guest registers written: rSP plus the
       ES selector/base/limit (used by the native recompiler). */
837 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
839 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
840 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
841 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES),
842 iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
843}
844
845
846/**
847 * @opcode 0x08
848 * @opgroup og_gen_arith_bin
849 * @opflmodify cf,pf,af,zf,sf,of
850 * @opflundef af
851 * @opflclear of,cf
852 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
853 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
854 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
855 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
856 */
857FNIEMOP_DEF(iemOp_or_Eb_Gb)
858{
859 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* OR r/m8, r8 — AF is architecturally undefined after OR. */
860 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
861 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_or_u8);
862 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
863}
864
865
866/**
867 * @opcode 0x09
868 * @opgroup og_gen_arith_bin
869 * @opflmodify cf,pf,af,zf,sf,of
870 * @opflundef af
871 * @opflclear of,cf
872 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
873 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
874 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
875 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
876 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
877 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
878 */
879FNIEMOP_DEF(iemOp_or_Ev_Gv)
880{
881 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* OR r/m16/32/64, r — AF undefined after OR; LOCKed tail closes braces. */
882 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
883 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
884 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
885}
886
887
888/**
889 * @opcode 0x0a
890 * @opgroup og_gen_arith_bin
891 * @opflmodify cf,pf,af,zf,sf,of
892 * @opflundef af
893 * @opflclear of,cf
894 * @opcopytests iemOp_or_Eb_Gb
895 */
896FNIEMOP_DEF(iemOp_or_Gb_Eb)
897{
898 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* OR r8, r/m8 — register destination; AF undefined after OR. */
899 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
900 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
901}
902
903
904/**
905 * @opcode 0x0b
906 * @opgroup og_gen_arith_bin
907 * @opflmodify cf,pf,af,zf,sf,of
908 * @opflundef af
909 * @opflclear of,cf
910 * @opcopytests iemOp_or_Ev_Gv
911 */
912FNIEMOP_DEF(iemOp_or_Gv_Ev)
913{
914 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* OR r16/32/64, r/m — AF undefined; IEMOP_BODY_BINARY_rv_rm defined
       elsewhere, same trailing flags as the ADD variant. */
915 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
916 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
917}
918
919
920/**
921 * @opcode 0x0c
922 * @opgroup og_gen_arith_bin
923 * @opflmodify cf,pf,af,zf,sf,of
924 * @opflundef af
925 * @opflclear of,cf
926 * @opcopytests iemOp_or_Eb_Gb
927 */
928FNIEMOP_DEF(iemOp_or_Al_Ib)
929{
930 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* OR AL, imm8 — AF undefined after OR. */
931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
932 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
933}
934
935
936/**
937 * @opcode 0x0d
938 * @opgroup og_gen_arith_bin
939 * @opflmodify cf,pf,af,zf,sf,of
940 * @opflundef af
941 * @opflclear of,cf
942 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
943 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
944 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
945 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
946 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
947 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
948 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
949 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after OR. */
    /* rAX,Iz form; the effective operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
956
957
958/**
959 * @opcode 0x0e
960 * @opgroup og_stack_sreg
961 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    /* Invalid in 64-bit mode (DISOPTYPE_X86_INVALID_64). */
    IEMOP_HLP_NO_64BIT();
    /* Shares the common segment-register push code with the other PUSH Sreg handlers. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
968
969
970/**
971 * @opcode 0x0f
972 * @opmnemonic EscTwo0f
973 * @openc two0f
974 * @opdisenum OP_2B_ESC
975 * @ophints harmless
976 * @opgroup og_escapes
977 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The two-byte map is indexed by opcode * 4 + mandatory-prefix index. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this further.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1015
1016/**
1017 * @opcode 0x10
1018 * @opgroup og_gen_arith_bin
1019 * @opfltest cf
1020 * @opflmodify cf,pf,af,zf,sf,of
1021 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1022 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1023 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1024 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1025 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1026 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Memory/register destination: emits both the plain and the LOCK'ed bodies. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1033
1034
1035/**
1036 * @opcode 0x11
1037 * @opgroup og_gen_arith_bin
1038 * @opfltest cf
1039 * @opflmodify cf,pf,af,zf,sf,of
1040 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1041 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1042 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1043 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1044 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1045 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Memory/register destination: emits both the plain and the LOCK'ed bodies. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1052
1053
1054/**
1055 * @opcode 0x12
1056 * @opgroup og_gen_arith_bin
1057 * @opfltest cf
1058 * @opflmodify cf,pf,af,zf,sf,of
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination form; no LOCK'ed body is emitted. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1066
1067
1068/**
1069 * @opcode 0x13
1070 * @opgroup og_gen_arith_bin
1071 * @opfltest cf
1072 * @opflmodify cf,pf,af,zf,sf,of
1073 * @opcopytests iemOp_adc_Ev_Gv
1074 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register destination; the effective operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1080
1081
1082/**
1083 * @opcode 0x14
1084 * @opgroup og_gen_arith_bin
1085 * @opfltest cf
1086 * @opflmodify cf,pf,af,zf,sf,of
1087 * @opcopytests iemOp_adc_Eb_Gb
1088 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed byte-sized AL,Ib encoding. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1094
1095
1096/**
1097 * @opcode 0x15
1098 * @opgroup og_gen_arith_bin
1099 * @opfltest cf
1100 * @opflmodify cf,pf,af,zf,sf,of
1101 * @opcopytests iemOp_adc_Ev_Gv
1102 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX,Iz form; the effective operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1108
1109
1110/**
1111 * @opcode 0x16
1112 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    /* Invalid in 64-bit mode (DISOPTYPE_X86_INVALID_64). */
    IEMOP_HLP_NO_64BIT();
    /* Shares the common segment-register push code with the other PUSH Sreg handlers. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1119
1120
1121/**
1122 * @opcode 0x17
1123 * @opgroup og_gen_arith_bin
1124 * @opfltest cf
1125 * @opflmodify cf,pf,af,zf,sf,of
1126 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Defers to iemCImpl_pop_Sreg; flags a possible mode change and sets the
       interrupt shadow (IEM_CIMPL_F_INHIBIT_SHADOW) for the next instruction. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1139
1140
1141/**
1142 * @opcode 0x18
1143 * @opgroup og_gen_arith_bin
1144 * @opfltest cf
1145 * @opflmodify cf,pf,af,zf,sf,of
1146 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Memory/register destination: emits both the plain and the LOCK'ed bodies. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1153
1154
1155/**
1156 * @opcode 0x19
1157 * @opgroup og_gen_arith_bin
1158 * @opfltest cf
1159 * @opflmodify cf,pf,af,zf,sf,of
1160 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Memory/register destination: emits both the plain and the LOCK'ed bodies. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1167
1168
1169/**
1170 * @opcode 0x1a
1171 * @opgroup og_gen_arith_bin
1172 * @opfltest cf
1173 * @opflmodify cf,pf,af,zf,sf,of
1174 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination form; no LOCK'ed body is emitted. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1180
1181
1182/**
1183 * @opcode 0x1b
1184 * @opgroup og_gen_arith_bin
1185 * @opfltest cf
1186 * @opflmodify cf,pf,af,zf,sf,of
1187 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register destination; the effective operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1193
1194
1195/**
1196 * @opcode 0x1c
1197 * @opgroup og_gen_arith_bin
1198 * @opfltest cf
1199 * @opflmodify cf,pf,af,zf,sf,of
1200 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed byte-sized AL,Ib encoding. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1206
1207
1208/**
1209 * @opcode 0x1d
1210 * @opgroup og_gen_arith_bin
1211 * @opfltest cf
1212 * @opflmodify cf,pf,af,zf,sf,of
1213 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX,Iz form; the effective operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1219
1220
1221/**
1222 * @opcode 0x1e
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    /* Invalid in 64-bit mode (DISOPTYPE_X86_INVALID_64). */
    IEMOP_HLP_NO_64BIT();
    /* Shares the common segment-register push code with the other PUSH Sreg handlers. */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1231
1232
1233/**
1234 * @opcode 0x1f
1235 * @opgroup og_stack_sreg
1236 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Defers to iemCImpl_pop_Sreg; unlike POP SS this sets no interrupt shadow. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1249
1250
1251/**
1252 * @opcode 0x20
1253 * @opgroup og_gen_arith_bin
1254 * @opflmodify cf,pf,af,zf,sf,of
1255 * @opflundef af
1256 * @opflclear of,cf
1257 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* Memory/register destination: emits both the plain and the LOCK'ed bodies. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1265
1266
1267/**
1268 * @opcode 0x21
1269 * @opgroup og_gen_arith_bin
1270 * @opflmodify cf,pf,af,zf,sf,of
1271 * @opflundef af
1272 * @opflclear of,cf
1273 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* Memory/register destination: emits both the plain and the LOCK'ed bodies. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1281
1282
1283/**
1284 * @opcode 0x22
1285 * @opgroup og_gen_arith_bin
1286 * @opflmodify cf,pf,af,zf,sf,of
1287 * @opflundef af
1288 * @opflclear of,cf
1289 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* Register destination form; no LOCK'ed body is emitted. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1296
1297
1298/**
1299 * @opcode 0x23
1300 * @opgroup og_gen_arith_bin
1301 * @opflmodify cf,pf,af,zf,sf,of
1302 * @opflundef af
1303 * @opflclear of,cf
1304 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* Register destination; the effective operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1311
1312
1313/**
1314 * @opcode 0x24
1315 * @opgroup og_gen_arith_bin
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef af
1318 * @opflclear of,cf
1319 */
1320FNIEMOP_DEF(iemOp_and_Al_Ib)
1321{
1322 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1323 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1324 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1325}
1326
1327
1328/**
1329 * @opcode 0x25
1330 * @opgroup og_gen_arith_bin
1331 * @opflmodify cf,pf,af,zf,sf,of
1332 * @opflundef af
1333 * @opflclear of,cf
1334 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after AND. */
    /* rAX,Iz form; the effective operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1341
1342
1343/**
1344 * @opcode 0x26
1345 * @opmnemonic SEG
1346 * @op1 ES
1347 * @opgroup og_prefix
1348 * @openc prefix
1349 * @opdisenum OP_SEG
1350 * @ophints harmless
1351 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    /* Record the ES segment override, then restart decoding with the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1361
1362
1363/**
1364 * @opcode 0x27
1365 * @opfltest af,cf
1366 * @opflmodify cf,pf,af,zf,sf,of
1367 * @opflundef of
1368 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA. */
    /* Deferred to iemCImpl_daa; only rAX and the status flags are modified. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1377
1378
1379/**
1380 * @opcode 0x28
1381 * @opgroup og_gen_arith_bin
1382 * @opflmodify cf,pf,af,zf,sf,of
1383 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Memory/register destination: emits both the plain and the LOCK'ed bodies. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1390
1391
1392/**
1393 * @opcode 0x29
1394 * @opgroup og_gen_arith_bin
1395 * @opflmodify cf,pf,af,zf,sf,of
1396 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Memory/register destination: emits both the plain and the LOCK'ed bodies. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1403
1404
1405/**
1406 * @opcode 0x2a
1407 * @opgroup og_gen_arith_bin
1408 * @opflmodify cf,pf,af,zf,sf,of
1409 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Register destination form; no LOCK'ed body is emitted. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1415
1416
1417/**
1418 * @opcode 0x2b
1419 * @opgroup og_gen_arith_bin
1420 * @opflmodify cf,pf,af,zf,sf,of
1421 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Register destination; the effective operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1427
1428
1429/**
1430 * @opcode 0x2c
1431 * @opgroup og_gen_arith_bin
1432 * @opflmodify cf,pf,af,zf,sf,of
1433 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed byte-sized AL,Ib encoding. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1439
1440
1441/**
1442 * @opcode 0x2d
1443 * @opgroup og_gen_arith_bin
1444 * @opflmodify cf,pf,af,zf,sf,of
1445 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX,Iz form; the effective operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1451
1452
1453/**
1454 * @opcode 0x2e
1455 * @opmnemonic SEG
1456 * @op1 CS
1457 * @opgroup og_prefix
1458 * @openc prefix
1459 * @opdisenum OP_SEG
1460 * @ophints harmless
1461 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    /* Record the CS segment override, then restart decoding with the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1471
1472
1473/**
1474 * @opcode 0x2f
1475 * @opfltest af,cf
1476 * @opflmodify cf,pf,af,zf,sf,of
1477 * @opflundef of
1478 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS. */
    /* Deferred to iemCImpl_das; only rAX and the status flags are modified. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1487
1488
1489/**
1490 * @opcode 0x30
1491 * @opgroup og_gen_arith_bin
1492 * @opflmodify cf,pf,af,zf,sf,of
1493 * @opflundef af
1494 * @opflclear of,cf
1495 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Memory/register destination: emits both the plain and the LOCK'ed bodies. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1503
1504
1505/**
1506 * @opcode 0x31
1507 * @opgroup og_gen_arith_bin
1508 * @opflmodify cf,pf,af,zf,sf,of
1509 * @opflundef af
1510 * @opflclear of,cf
1511 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Memory/register destination: emits both the plain and the LOCK'ed bodies. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1519
1520
1521/**
1522 * @opcode 0x32
1523 * @opgroup og_gen_arith_bin
1524 * @opflmodify cf,pf,af,zf,sf,of
1525 * @opflundef af
1526 * @opflclear of,cf
1527 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Register destination form; no LOCK'ed body is emitted. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1534
1535
1536/**
1537 * @opcode 0x33
1538 * @opgroup og_gen_arith_bin
1539 * @opflmodify cf,pf,af,zf,sf,of
1540 * @opflundef af
1541 * @opflclear of,cf
1542 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Register destination; the effective operand size selects the 16/32/64-bit worker. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1549
1550
1551/**
1552 * @opcode 0x34
1553 * @opgroup og_gen_arith_bin
1554 * @opflmodify cf,pf,af,zf,sf,of
1555 * @opflundef af
1556 * @opflclear of,cf
1557 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after XOR. */
    /* Fixed byte-sized AL,Ib encoding. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1564
1565
1566/**
1567 * @opcode 0x35
1568 * @opgroup og_gen_arith_bin
1569 * @opflmodify cf,pf,af,zf,sf,of
1570 * @opflundef af
1571 * @opflclear of,cf
1572 */
1573FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1574{
1575 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1576 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1577 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1578}
1579
1580
1581/**
1582 * @opcode 0x36
1583 * @opmnemonic SEG
1584 * @op1 SS
1585 * @opgroup og_prefix
1586 * @openc prefix
1587 * @opdisenum OP_SEG
1588 * @ophints harmless
1589 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    /* Record the SS segment override, then restart decoding with the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1599
1600
1601/**
1602 * @opcode 0x37
1603 * @opfltest af,cf
1604 * @opflmodify cf,pf,af,zf,sf,of
1605 * @opflundef pf,zf,sf,of
1606 * @opgroup og_gen_arith_dec
1607 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1608 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1609 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1610 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1611 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1612 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1613 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1615 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1617 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1618 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1619 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1621 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1622 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1623 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1624 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1625 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1626 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1627 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1628 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1629 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1630 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1631 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1632 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1633 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1634 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1635 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1636 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1637 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1638 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* see the @opflundef list above */

    /* Deferred to iemCImpl_aaa; only rAX and the status flags are modified. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1648
1649
1650/**
1651 * @opcode 0x38
1652 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP only writes EFLAGS, so the destination is read-only (RO body) and
       the NO_LOCK body handles the LOCK prefix case. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1659
1660
1661/**
1662 * @opcode 0x39
1663 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* CMP only writes EFLAGS, so the destination is read-only (RO body). */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1669
1670
1671/**
1672 * @opcode 0x3a
1673 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    /* Register-first byte form; CMP only updates EFLAGS. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1679
1680
1681/**
1682 * @opcode 0x3b
1683 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    /* Register-first form; note the 0 modify-flag since CMP only updates EFLAGS. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1689
1690
1691/**
1692 * @opcode 0x3c
1693 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* Fixed byte-sized AL,Ib encoding; CMP only updates EFLAGS. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1699
1700
1701/**
1702 * @opcode 0x3d
1703 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* rAX,Iz form; note the 0 modify-flag since CMP only updates EFLAGS. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1709
1710
1711/**
1712 * @opcode 0x3e
1713 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    /* Record the DS segment override, then restart decoding with the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1723
1724
1725/**
1726 * @opcode 0x3f
1727 * @opfltest af,cf
1728 * @opflmodify cf,pf,af,zf,sf,of
1729 * @opflundef pf,zf,sf,of
1730 * @opgroup og_gen_arith_dec
1731 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1732 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1733 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1734 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1735 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1736 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1745 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1751 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1752 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1753 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1754 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1756 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1758 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1759 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1760 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1761 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1762 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1763 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1764 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1765 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1766 */
1767FNIEMOP_DEF(iemOp_aas)
1768{
1769 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1770 IEMOP_HLP_NO_64BIT();
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1772 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1773
1774 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1775}
1776
1777
1778/**
1779 * Common 'inc/dec register' helper.
1780 *
1781 * Not for 64-bit code, only for what became the rex prefixes.
1782 */
1783#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1784 switch (pVCpu->iem.s.enmEffOpSize) \
1785 { \
1786 case IEMMODE_16BIT: \
1787 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1789 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1790 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1791 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1792 IEM_MC_REF_EFLAGS(pEFlags); \
1793 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1795 IEM_MC_END(); \
1796 break; \
1797 \
1798 case IEMMODE_32BIT: \
1799 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1801 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1802 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1803 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1804 IEM_MC_REF_EFLAGS(pEFlags); \
1805 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1806 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
1807 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1808 IEM_MC_END(); \
1809 break; \
1810 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1811 } \
1812 (void)0
1813
1814/**
1815 * @opcode 0x40
1816 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    /* 16/32-bit mode: plain INC of eAX. */
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1834
1835
1836/**
1837 * @opcode 0x41
1838 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* bit 3 extension for the ModRM r/m / base field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    /* 16/32-bit mode: plain INC of eCX. */
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1857
1858
1859/**
1860 * @opcode 0x42
1861 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* bit 3 extension for the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    /* 16/32-bit mode: plain INC of eDX. */
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1880
1881
1882
1883/**
1884 * @opcode 0x43
1885 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3; /* bit 3 extension for the ModRM r/m / base field */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* bit 3 extension for the SIB index field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    /* 16/32-bit mode: plain INC of eBX. */
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1905
1906
1907/**
1908 * @opcode 0x44
1909 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* bit 3 extension for the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    /* 16/32-bit mode: plain INC of eSP. */
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1928
1929
1930/**
1931 * @opcode 0x45
1932 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* bit 3 extension for the ModRM reg field */
        pVCpu->iem.s.uRexB     = 1 << 3; /* bit 3 extension for the ModRM r/m / base field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    /* 16/32-bit mode: plain INC of eBP. */
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1952
1953
1954/**
1955 * @opcode 0x46
 * @note 'inc eSI' in 16/32-bit code; in 64-bit mode byte 0x46 is the REX.RX
 *       prefix (extends ModRM.reg and SIB.index).
1956 */
1957FNIEMOP_DEF(iemOp_inc_eSI)
1958{
1959    /*
1960     * This is a REX prefix in 64-bit mode.
1961     */
1962    if (IEM_IS_64BIT_CODE(pVCpu))
1963    {
1964        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1965        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1966        pVCpu->iem.s.uRexReg   = 1 << 3; /* ModRM.reg extension: +8 */
1967        pVCpu->iem.s.uRexIndex = 1 << 3; /* SIB index register extension: +8 */
1968
1969        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1970        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
1971    }
1972
1973    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1974    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
1975}
1976
1977
1978/**
1979 * @opcode 0x47
 * @note 'inc eDI' in 16/32-bit code; in 64-bit mode byte 0x47 is the REX.RBX
 *       prefix (extends ModRM.reg, ModRM.rm/SIB.base and SIB.index).
1980 */
1981FNIEMOP_DEF(iemOp_inc_eDI)
1982{
1983    /*
1984     * This is a REX prefix in 64-bit mode.
1985     */
1986    if (IEM_IS_64BIT_CODE(pVCpu))
1987    {
1988        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1989        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1990        pVCpu->iem.s.uRexReg   = 1 << 3; /* ModRM.reg extension: +8 */
1991        pVCpu->iem.s.uRexB     = 1 << 3; /* base/rm register extension: +8 */
1992        pVCpu->iem.s.uRexIndex = 1 << 3; /* SIB index register extension: +8 */
1993
1994        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1995        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
1996    }
1997
1998    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1999    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
2000}
2001
2002
2003/**
2004 * @opcode 0x48
 * @note 'dec eAX' in 16/32-bit code; in 64-bit mode byte 0x48 is the REX.W
 *       prefix (selects 64-bit operand size, hence the effective operand
 *       size recalculation below).
2005 */
2006FNIEMOP_DEF(iemOp_dec_eAX)
2007{
2008    /*
2009     * This is a REX prefix in 64-bit mode.
2010     */
2011    if (IEM_IS_64BIT_CODE(pVCpu))
2012    {
2013        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
2014        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
2015        iemRecalEffOpSize(pVCpu); /* W bit changes the effective operand size */
2016
2017        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2018        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2019    }
2020
2021    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
2022    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
2023}
2024
2025
2026/**
2027 * @opcode 0x49
 * @note 'dec eCX' in 16/32-bit code; in 64-bit mode byte 0x49 is the REX.BW
 *       prefix (64-bit operand size + base/rm register extension).
2028 */
2029FNIEMOP_DEF(iemOp_dec_eCX)
2030{
2031    /*
2032     * This is a REX prefix in 64-bit mode.
2033     */
2034    if (IEM_IS_64BIT_CODE(pVCpu))
2035    {
2036        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
2037        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2038        pVCpu->iem.s.uRexB     = 1 << 3; /* base/rm register extension: +8 */
2039        iemRecalEffOpSize(pVCpu); /* W bit changes the effective operand size */
2040
2041        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2042        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2043    }
2044
2045    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
2046    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
2047}
2048
2049
2050/**
2051 * @opcode 0x4a
 * @note 'dec eDX' in 16/32-bit code; in 64-bit mode byte 0x4a is the REX.XW
 *       prefix (64-bit operand size + SIB index extension).
2052 */
2053FNIEMOP_DEF(iemOp_dec_eDX)
2054{
2055    /*
2056     * This is a REX prefix in 64-bit mode.
2057     */
2058    if (IEM_IS_64BIT_CODE(pVCpu))
2059    {
2060        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
2061        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2062        pVCpu->iem.s.uRexIndex = 1 << 3; /* SIB index register extension: +8 */
2063        iemRecalEffOpSize(pVCpu); /* W bit changes the effective operand size */
2064
2065        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2066        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2067    }
2068
2069    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
2070    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
2071}
2072
2073
2074/**
2075 * @opcode 0x4b
 * @note 'dec eBX' in 16/32-bit code; in 64-bit mode byte 0x4b is the REX.BXW
 *       prefix (64-bit operand size + base/rm and SIB index extensions).
2076 */
2077FNIEMOP_DEF(iemOp_dec_eBX)
2078{
2079    /*
2080     * This is a REX prefix in 64-bit mode.
2081     */
2082    if (IEM_IS_64BIT_CODE(pVCpu))
2083    {
2084        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
2085        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2086        pVCpu->iem.s.uRexB     = 1 << 3; /* base/rm register extension: +8 */
2087        pVCpu->iem.s.uRexIndex = 1 << 3; /* SIB index register extension: +8 */
2088        iemRecalEffOpSize(pVCpu); /* W bit changes the effective operand size */
2089
2090        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2091        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2092    }
2093
2094    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
2095    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
2096}
2097
2098
2099/**
2100 * @opcode 0x4c
 * @note 'dec eSP' in 16/32-bit code; in 64-bit mode byte 0x4c is the REX.RW
 *       prefix (64-bit operand size + ModRM.reg extension).
2101 */
2102FNIEMOP_DEF(iemOp_dec_eSP)
2103{
2104    /*
2105     * This is a REX prefix in 64-bit mode.
2106     */
2107    if (IEM_IS_64BIT_CODE(pVCpu))
2108    {
2109        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
2110        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
2111        pVCpu->iem.s.uRexReg   = 1 << 3; /* ModRM.reg extension: +8 */
2112        iemRecalEffOpSize(pVCpu); /* W bit changes the effective operand size */
2113
2114        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2115        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2116    }
2117
2118    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
2119    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
2120}
2121
2122
2123/**
2124 * @opcode 0x4d
 * @note 'dec eBP' in 16/32-bit code; in 64-bit mode byte 0x4d is the REX.RBW
 *       prefix (64-bit operand size + ModRM.reg and base/rm extensions).
2125 */
2126FNIEMOP_DEF(iemOp_dec_eBP)
2127{
2128    /*
2129     * This is a REX prefix in 64-bit mode.
2130     */
2131    if (IEM_IS_64BIT_CODE(pVCpu))
2132    {
2133        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
2134        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
2135        pVCpu->iem.s.uRexReg   = 1 << 3; /* ModRM.reg extension: +8 */
2136        pVCpu->iem.s.uRexB     = 1 << 3; /* base/rm register extension: +8 */
2137        iemRecalEffOpSize(pVCpu); /* W bit changes the effective operand size */
2138
2139        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2140        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2141    }
2142
2143    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
2144    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
2145}
2146
2147
2148/**
2149 * @opcode 0x4e
 * @note 'dec eSI' in 16/32-bit code; in 64-bit mode byte 0x4e is the REX.RXW
 *       prefix (64-bit operand size + ModRM.reg and SIB index extensions).
2150 */
2151FNIEMOP_DEF(iemOp_dec_eSI)
2152{
2153    /*
2154     * This is a REX prefix in 64-bit mode.
2155     */
2156    if (IEM_IS_64BIT_CODE(pVCpu))
2157    {
2158        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
2159        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2160        pVCpu->iem.s.uRexReg   = 1 << 3; /* ModRM.reg extension: +8 */
2161        pVCpu->iem.s.uRexIndex = 1 << 3; /* SIB index register extension: +8 */
2162        iemRecalEffOpSize(pVCpu); /* W bit changes the effective operand size */
2163
2164        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2165        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2166    }
2167
2168    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
2169    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
2170}
2171
2172
2173/**
2174 * @opcode 0x4f
 * @note 'dec eDI' in 16/32-bit code; in 64-bit mode byte 0x4f is the REX.RBXW
 *       prefix (all four REX bits set).
2175 */
2176FNIEMOP_DEF(iemOp_dec_eDI)
2177{
2178    /*
2179     * This is a REX prefix in 64-bit mode.
2180     */
2181    if (IEM_IS_64BIT_CODE(pVCpu))
2182    {
2183        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
2184        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
2185        pVCpu->iem.s.uRexReg   = 1 << 3; /* ModRM.reg extension: +8 */
2186        pVCpu->iem.s.uRexB     = 1 << 3; /* base/rm register extension: +8 */
2187        pVCpu->iem.s.uRexIndex = 1 << 3; /* SIB index register extension: +8 */
2188        iemRecalEffOpSize(pVCpu); /* W bit changes the effective operand size */
2189
2190        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2191        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2192    }
2193
2194    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
2195    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
2196}
2197
2198
2199/**
2200 * Common 'push register' helper.
 *
 * @param   iReg    General register index (X86_GREG_XXX); OR'ed with the
 *                  REX.B extension in 64-bit mode below.
 * @note    In 64-bit mode the default operand size becomes 64-bit and the
 *          66h prefix selects 16-bit - there is no 32-bit push in 64-bit
 *          mode, hence the 32-bit case is marked IEM_MC_F_NOT_64BIT.
2201 */
2202FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
2203{
2204    if (IEM_IS_64BIT_CODE(pVCpu))
2205    {
2206        iReg |= pVCpu->iem.s.uRexB;
2207        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2208        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2209    }
2210
2211    switch (pVCpu->iem.s.enmEffOpSize)
2212    {
2213        case IEMMODE_16BIT:
2214            IEM_MC_BEGIN(0, 1, 0, 0);
2215            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2216            IEM_MC_LOCAL(uint16_t, u16Value);
2217            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
2218            IEM_MC_PUSH_U16(u16Value);
2219            IEM_MC_ADVANCE_RIP_AND_FINISH();
2220            IEM_MC_END();
2221            break;
2222
2223        case IEMMODE_32BIT:
2224            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2225            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2226            IEM_MC_LOCAL(uint32_t, u32Value);
2227            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
2228            IEM_MC_PUSH_U32(u32Value);
2229            IEM_MC_ADVANCE_RIP_AND_FINISH();
2230            IEM_MC_END();
2231            break;
2232
2233        case IEMMODE_64BIT:
2234            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2235            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2236            IEM_MC_LOCAL(uint64_t, u64Value);
2237            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
2238            IEM_MC_PUSH_U64(u64Value);
2239            IEM_MC_ADVANCE_RIP_AND_FINISH();
2240            IEM_MC_END();
2241            break;
2242
2243        IEM_NOT_REACHED_DEFAULT_CASE_RET();
2244    }
2245}
2246
2247
2248/**
2249 * @opcode 0x50
 * @note Delegates to iemOpCommonPushGReg, which applies REX.B in 64-bit mode.
2250 */
2251FNIEMOP_DEF(iemOp_push_eAX)
2252{
2253    IEMOP_MNEMONIC(push_rAX, "push rAX");
2254    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
2255}
2256
2257
2258/**
2259 * @opcode 0x51
2260 */
2261FNIEMOP_DEF(iemOp_push_eCX)
2262{
2263 IEMOP_MNEMONIC(push_rCX, "push rCX");
2264 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
2265}
2266
2267
2268/**
2269 * @opcode 0x52
 * @note Delegates to iemOpCommonPushGReg, which applies REX.B in 64-bit mode.
2270 */
2271FNIEMOP_DEF(iemOp_push_eDX)
2272{
2273    IEMOP_MNEMONIC(push_rDX, "push rDX");
2274    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
2275}
2276
2277
2278/**
2279 * @opcode 0x53
 * @note Delegates to iemOpCommonPushGReg, which applies REX.B in 64-bit mode.
2280 */
2281FNIEMOP_DEF(iemOp_push_eBX)
2282{
2283    IEMOP_MNEMONIC(push_rBX, "push rBX");
2284    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
2285}
2286
2287
2288/**
2289 * @opcode 0x54
 * @note On the 8086 target 'push sp' pushes the value of SP *after* the
 *       decrement (the SUB below reproduces that quirk); 286+ push the
 *       pre-decrement value via the common helper.
2290 */
2291FNIEMOP_DEF(iemOp_push_eSP)
2292{
2293    IEMOP_MNEMONIC(push_rSP, "push rSP");
2294    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
2295    {
2296        IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
2297        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2298        IEM_MC_LOCAL(uint16_t, u16Value);
2299        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
2300        IEM_MC_SUB_LOCAL_U16(u16Value, 2); /* 8086 quirk: pushes SP-2 */
2301        IEM_MC_PUSH_U16(u16Value);
2302        IEM_MC_ADVANCE_RIP_AND_FINISH();
2303        IEM_MC_END();
2304    }
2305    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
2306}
2307
2308
2309/**
2310 * @opcode 0x55
 * @note Delegates to iemOpCommonPushGReg, which applies REX.B in 64-bit mode.
2311 */
2312FNIEMOP_DEF(iemOp_push_eBP)
2313{
2314    IEMOP_MNEMONIC(push_rBP, "push rBP");
2315    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
2316}
2317
2318
2319/**
2320 * @opcode 0x56
 * @note Delegates to iemOpCommonPushGReg, which applies REX.B in 64-bit mode.
2321 */
2322FNIEMOP_DEF(iemOp_push_eSI)
2323{
2324    IEMOP_MNEMONIC(push_rSI, "push rSI");
2325    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
2326}
2327
2328
2329/**
2330 * @opcode 0x57
 * @note Delegates to iemOpCommonPushGReg, which applies REX.B in 64-bit mode.
2331 */
2332FNIEMOP_DEF(iemOp_push_eDI)
2333{
2334    IEMOP_MNEMONIC(push_rDI, "push rDI");
2335    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
2336}
2337
2338
2339/**
2340 * Common 'pop register' helper.
2341 */
2342FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
2343{
2344 if (IEM_IS_64BIT_CODE(pVCpu))
2345 {
2346 iReg |= pVCpu->iem.s.uRexB;
2347 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
2348 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
2349 }
2350
2351 switch (pVCpu->iem.s.enmEffOpSize)
2352 {
2353 case IEMMODE_16BIT:
2354 IEM_MC_BEGIN(0, 0, 0, 0);
2355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2356 IEM_MC_POP_GREG_U16(iReg);
2357 IEM_MC_ADVANCE_RIP_AND_FINISH();
2358 IEM_MC_END();
2359 break;
2360
2361 case IEMMODE_32BIT:
2362 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2364 IEM_MC_POP_GREG_U32(iReg);
2365 IEM_MC_ADVANCE_RIP_AND_FINISH();
2366 IEM_MC_END();
2367 break;
2368
2369 case IEMMODE_64BIT:
2370 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
2371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2372 IEM_MC_POP_GREG_U64(iReg);
2373 IEM_MC_ADVANCE_RIP_AND_FINISH();
2374 IEM_MC_END();
2375 break;
2376
2377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2378 }
2379}
2380
2381
2382/**
2383 * @opcode 0x58
 * @note Delegates to iemOpCommonPopGReg, which applies REX.B in 64-bit mode.
2384 */
2385FNIEMOP_DEF(iemOp_pop_eAX)
2386{
2387    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
2388    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
2389}
2390
2391
2392/**
2393 * @opcode 0x59
 * @note Delegates to iemOpCommonPopGReg, which applies REX.B in 64-bit mode.
2394 */
2395FNIEMOP_DEF(iemOp_pop_eCX)
2396{
2397    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
2398    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
2399}
2400
2401
2402/**
2403 * @opcode 0x5a
 * @note Delegates to iemOpCommonPopGReg, which applies REX.B in 64-bit mode.
2404 */
2405FNIEMOP_DEF(iemOp_pop_eDX)
2406{
2407    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
2408    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
2409}
2410
2411
2412/**
2413 * @opcode 0x5b
 * @note Delegates to iemOpCommonPopGReg, which applies REX.B in 64-bit mode.
2414 */
2415FNIEMOP_DEF(iemOp_pop_eBX)
2416{
2417    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
2418    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
2419}
2420
2421
2422/**
2423 * @opcode 0x5c
 * @note Delegates to iemOpCommonPopGReg, which applies REX.B in 64-bit mode.
2424 */
2425FNIEMOP_DEF(iemOp_pop_eSP)
2426{
2427    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
2428    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
2429}
2430
2431
2432/**
2433 * @opcode 0x5d
 * @note Delegates to iemOpCommonPopGReg, which applies REX.B in 64-bit mode.
2434 */
2435FNIEMOP_DEF(iemOp_pop_eBP)
2436{
2437    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
2438    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
2439}
2440
2441
2442/**
2443 * @opcode 0x5e
 * @note Delegates to iemOpCommonPopGReg, which applies REX.B in 64-bit mode.
2444 */
2445FNIEMOP_DEF(iemOp_pop_eSI)
2446{
2447    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
2448    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
2449}
2450
2451
2452/**
2453 * @opcode 0x5f
 * @note Delegates to iemOpCommonPopGReg, which applies REX.B in 64-bit mode.
2454 */
2455FNIEMOP_DEF(iemOp_pop_eDI)
2456{
2457    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
2458    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
2459}
2460
2461
2462/**
2463 * @opcode 0x60
 * @note PUSHA/PUSHAD - deferred to a C implementation; the clobber mask
 *       only lists xSP since that is the sole register written.
 *       Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT), min CPU is 80186.
2464 */
2465FNIEMOP_DEF(iemOp_pusha)
2466{
2467    IEMOP_MNEMONIC(pusha, "pusha");
2468    IEMOP_HLP_MIN_186();
2469    IEMOP_HLP_NO_64BIT();
2470    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2471        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
2472    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2473    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
2474}
2475
2476
2477/**
2478 * @opcode 0x61
2479 */
2480FNIEMOP_DEF(iemOp_popa__mvex)
2481{
2482 if (!IEM_IS_64BIT_CODE(pVCpu))
2483 {
2484 IEMOP_MNEMONIC(popa, "popa");
2485 IEMOP_HLP_MIN_186();
2486 IEMOP_HLP_NO_64BIT();
2487 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2488 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2489 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2490 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2491 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2492 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2493 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2494 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2495 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2496 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2497 iemCImpl_popa_16);
2498 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2499 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2500 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2501 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2502 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2503 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2504 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2505 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2506 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2507 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2508 iemCImpl_popa_32);
2509 }
2510 IEMOP_MNEMONIC(mvex, "mvex");
2511 Log(("mvex prefix is not supported!\n"));
2512 IEMOP_RAISE_INVALID_OPCODE_RET();
2513}
2514
2515
2516/**
2517 * @opcode 0x62
2518 * @opmnemonic bound
2519 * @op1 Gv_RO
2520 * @op2 Ma
2521 * @opmincpu 80186
2522 * @ophints harmless x86_invalid_64
2523 * @optest op1=0 op2=0 ->
2524 * @optest op1=1 op2=0 -> value.xcpt=5
2525 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2526 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2527 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2528 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2529 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2530 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2531 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2532 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2533 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2534 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2535 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2536 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2537 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2538 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2539 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2540 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2541 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2542 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2543 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2544 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2545 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2546 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2547 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2548 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2549 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2550 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2551 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2552 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2553 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2554 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2555 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2556 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2557 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2558 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2559 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2560 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2561 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2562 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2563 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2564 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2565 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2566 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2567 */
2568FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
2569{
2570    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
2571       compatibility mode it is invalid with MOD=3.
2572
2573       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
2574       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
2575       given as R and X without an exact description, so we assume it builds on
2576       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
2577       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
2578    uint8_t bRm;
2579    if (!IEM_IS_64BIT_CODE(pVCpu))
2580    {
2581        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
2582        IEMOP_HLP_MIN_186();
2583        IEM_OPCODE_GET_NEXT_U8(&bRm);
2584        if (IEM_IS_MODRM_MEM_MODE(bRm))
2585        {
2586            /** @todo testcase: check that there are two memory accesses involved.  Check
2587             *        whether they're both read before the \#BR triggers. */
2588            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2589            {
2590                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
2591                IEM_MC_ARG(uint16_t,    u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2592                IEM_MC_ARG(uint16_t,    u16LowerBounds, 1);
2593                IEM_MC_ARG(uint16_t,    u16UpperBounds, 2);
2594                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
2595
2596                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2597                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2598
2599                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
2600                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2601                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
2602
2603                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
2604                IEM_MC_END();
2605            }
2606            else /* 32-bit operands */
2607            {
2608                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2609                IEM_MC_ARG(uint32_t,    u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
2610                IEM_MC_ARG(uint32_t,    u32LowerBounds, 1);
2611                IEM_MC_ARG(uint32_t,    u32UpperBounds, 2);
2612                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
2613
2614                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
2615                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2616
2617                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
2618                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
2619                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
2620
2621                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
2622                IEM_MC_END();
2623            }
2624        }
2625
2626        /*
2627         * @opdone
2628         */
2629        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2630        {
2631            /* Note that there is no need for the CPU to fetch further bytes
2632               here because MODRM.MOD == 3. */
2633            Log(("evex not supported by the guest CPU!\n"));
2634            IEMOP_RAISE_INVALID_OPCODE_RET();
2635        }
2636    }
2637    else
2638    {
2639        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
2640         *        does modr/m read, whereas AMD probably doesn't... */
2641        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
2642        {
2643            Log(("evex not supported by the guest CPU!\n"));
2644            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
2645        }
2646        IEM_OPCODE_GET_NEXT_U8(&bRm);
2647    }
2648
2649    IEMOP_MNEMONIC(evex, "evex");
2650    /* Consume the remaining two EVEX payload bytes before giving up. */
2651    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
2652    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
2653    Log(("evex prefix is not implemented!\n"));
2654    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
2655}
2655
2656
2657/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjust-RPL; actual semantics live in iemAImpl_arpl.
 * Requires 286+ and protected mode (no real/V86 mode).  Register and
 * memory destination forms are handled separately below; the memory form
 * maps the destination read/write and commits EFLAGS explicitly. */
2658FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
2659{
2660    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
2661    IEMOP_HLP_MIN_286();
2662    IEMOP_HLP_NO_REAL_OR_V86_MODE();
2663    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2664
2665    if (IEM_IS_MODRM_REG_MODE(bRm))
2666    {
2667        /* Register */
2668        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
2669        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2670        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
2671        IEM_MC_ARG(uint16_t,        u16Src,     1);
2672        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
2673
2674        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2675        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
2676        IEM_MC_REF_EFLAGS(pEFlags);
2677        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2678
2679        IEM_MC_ADVANCE_RIP_AND_FINISH();
2680        IEM_MC_END();
2681    }
2682    else
2683    {
2684        /* Memory */
2685        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
2686        IEM_MC_ARG(uint16_t *, pu16Dst,                    0);
2687        IEM_MC_ARG(uint16_t,   u16Src,                     1);
2688        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,           2);
2689        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
2690        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);
2691
2692        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2693        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2694        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2695        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
2696        IEM_MC_FETCH_EFLAGS(EFlags);
2697        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2698
2699        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
2700        IEM_MC_COMMIT_EFLAGS(EFlags);
2701        IEM_MC_ADVANCE_RIP_AND_FINISH();
2702        IEM_MC_END();
2703    }
2704}
2705
2706
2707/**
2708 * @opcode 0x63
2709 *
2710 * @note This is a weird one. It works like a regular move instruction if
2711 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2712 * @todo This definitely needs a testcase to verify the odd cases.
 * @note Only the REX.W form (32->64 sign extension) is implemented here;
 *       the non-REX.W form asserts with VERR_IEM_INSTR_NOT_IMPLEMENTED. */
2713FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2714{
2715    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
2716
2717    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2718    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2719
2720    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
2721    {
2722        if (IEM_IS_MODRM_REG_MODE(bRm))
2723        {
2724            /*
2725             * Register to register.
2726             */
2727            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2728            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2729            IEM_MC_LOCAL(uint64_t, u64Value);
2730            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
2731            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2732            IEM_MC_ADVANCE_RIP_AND_FINISH();
2733            IEM_MC_END();
2734        }
2735        else
2736        {
2737            /*
2738             * We're loading a register from memory.
2739             */
2740            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
2741            IEM_MC_LOCAL(uint64_t, u64Value);
2742            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2743            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2744            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2745            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2746            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
2747            IEM_MC_ADVANCE_RIP_AND_FINISH();
2748            IEM_MC_END();
2749        }
2750    }
2751    else
2752        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
2753}
2754
2755
2756/**
2757 * @opcode 0x64
2758 * @opmnemonic segfs
2759 * @opmincpu 80386
2760 * @opgroup og_prefixes
 * @note Segment override prefix: records FS as the effective segment and
 *       recursively decodes the next opcode byte.
2761 */
2762FNIEMOP_DEF(iemOp_seg_FS)
2763{
2764    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2765    IEMOP_HLP_MIN_386();
2766
2767    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2768    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;
2769
2770    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2771    return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2772}
2773
2774
2775/**
2776 * @opcode 0x65
2777 * @opmnemonic seggs
2778 * @opmincpu 80386
2779 * @opgroup og_prefixes
 * @note Segment override prefix: records GS as the effective segment and
 *       recursively decodes the next opcode byte.
2780 */
2781FNIEMOP_DEF(iemOp_seg_GS)
2782{
2783    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2784    IEMOP_HLP_MIN_386();
2785
2786    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2787    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;
2788
2789    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2790    return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2791}
2792
2793
2794/**
2795 * @opcode 0x66
2796 * @opmnemonic opsize
2797 * @openc prefix
2798 * @opmincpu 80386
2799 * @ophints harmless
2800 * @opgroup og_prefixes
 * @note Operand-size override prefix: flags it, recalculates the effective
 *       operand size and recursively decodes the next opcode byte.
2801 */
2802FNIEMOP_DEF(iemOp_op_size)
2803{
2804    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2805    IEMOP_HLP_MIN_386();
2806
2807    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2808    iemRecalEffOpSize(pVCpu);
2809
2810    /* For the 4 entry opcode tables, the operand prefix doesn't count
2811       when REPZ or REPNZ are present. */
2812    if (pVCpu->iem.s.idxPrefix == 0)
2813        pVCpu->iem.s.idxPrefix = 1;
2814
2815    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2816    return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2817}
2818
2819
2820/**
2821 * @opcode 0x67
2822 * @opmnemonic addrsize
2823 * @openc prefix
2824 * @opmincpu 80386
2825 * @ophints harmless
2826 * @opgroup og_prefixes
 * @note Address-size override prefix: toggles 16<->32-bit addressing (or
 *       64->32 in long mode) and recursively decodes the next opcode byte.
2827 */
2828FNIEMOP_DEF(iemOp_addr_size)
2829{
2830    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2831    IEMOP_HLP_MIN_386();
2832
2833    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2834    switch (pVCpu->iem.s.enmDefAddrMode)
2835    {
2836        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2837        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2838        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break; /* no 16-bit addressing in long mode */
2839        default: AssertFailed();
2840    }
2841
2842    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2843    return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* re-dispatch on the real opcode */
2844}
2845
2846
2847/**
2848 * @opcode 0x68
 * @note PUSH Iz - push immediate word/dword; in 64-bit mode the default
 *       operand size is 64-bit and the imm32 is sign-extended to 64 bits.
 *       Min CPU is 80186.
2849 */
2850FNIEMOP_DEF(iemOp_push_Iz)
2851{
2852    IEMOP_MNEMONIC(push_Iz, "push Iz");
2853    IEMOP_HLP_MIN_186();
2854    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2855    switch (pVCpu->iem.s.enmEffOpSize)
2856    {
2857        case IEMMODE_16BIT:
2858            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
2859            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2860            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2861            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
2862            IEM_MC_PUSH_U16(u16Value);
2863            IEM_MC_ADVANCE_RIP_AND_FINISH();
2864            IEM_MC_END();
2865            break;
2866
2867        case IEMMODE_32BIT:
2868            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
2869            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2870            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2871            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
2872            IEM_MC_PUSH_U32(u32Value);
2873            IEM_MC_ADVANCE_RIP_AND_FINISH();
2874            IEM_MC_END();
2875            break;
2876
2877        case IEMMODE_64BIT:
2878            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
2879            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
2880            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2881            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
2882            IEM_MC_PUSH_U64(u64Value);
2883            IEM_MC_ADVANCE_RIP_AND_FINISH();
2884            IEM_MC_END();
2885            break;
2886
2887        IEM_NOT_REACHED_DEFAULT_CASE_RET();
2888    }
2889}
2890
2891
2892/**
2893 * @opcode 0x69
2894 */
2895FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2896{
2897 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2898 IEMOP_HLP_MIN_186();
2899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2900 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2901
2902 switch (pVCpu->iem.s.enmEffOpSize)
2903 {
2904 case IEMMODE_16BIT:
2905 {
2906 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2907 if (IEM_IS_MODRM_REG_MODE(bRm))
2908 {
2909 /* register operand */
2910 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2911 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2913 IEM_MC_LOCAL(uint16_t, u16Tmp);
2914 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2915 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2916 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
2917 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2918 IEM_MC_REF_EFLAGS(pEFlags);
2919 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2920 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2921
2922 IEM_MC_ADVANCE_RIP_AND_FINISH();
2923 IEM_MC_END();
2924 }
2925 else
2926 {
2927 /* memory operand */
2928 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2930 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2931
2932 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2934
2935 IEM_MC_LOCAL(uint16_t, u16Tmp);
2936 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2937
2938 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2939 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
2940 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2941 IEM_MC_REF_EFLAGS(pEFlags);
2942 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2943 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2944
2945 IEM_MC_ADVANCE_RIP_AND_FINISH();
2946 IEM_MC_END();
2947 }
2948 break;
2949 }
2950
2951 case IEMMODE_32BIT:
2952 {
2953 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2954 if (IEM_IS_MODRM_REG_MODE(bRm))
2955 {
2956 /* register operand */
2957 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2958 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
2959 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2960 IEM_MC_LOCAL(uint32_t, u32Tmp);
2961 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2962
2963 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
2964 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
2965 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2966 IEM_MC_REF_EFLAGS(pEFlags);
2967 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2968 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2969
2970 IEM_MC_ADVANCE_RIP_AND_FINISH();
2971 IEM_MC_END();
2972 }
2973 else
2974 {
2975 /* memory operand */
2976 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
2977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2978 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2979
2980 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2982
2983 IEM_MC_LOCAL(uint32_t, u32Tmp);
2984 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2985
2986 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
2987 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
2988 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2989 IEM_MC_REF_EFLAGS(pEFlags);
2990 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2991 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2992
2993 IEM_MC_ADVANCE_RIP_AND_FINISH();
2994 IEM_MC_END();
2995 }
2996 break;
2997 }
2998
2999 case IEMMODE_64BIT:
3000 {
3001 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3002 if (IEM_IS_MODRM_REG_MODE(bRm))
3003 {
3004 /* register operand */
3005 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3006 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3008 IEM_MC_LOCAL(uint64_t, u64Tmp);
3009 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3010
3011 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3012 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
3013 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3014 IEM_MC_REF_EFLAGS(pEFlags);
3015 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3016 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3017
3018 IEM_MC_ADVANCE_RIP_AND_FINISH();
3019 IEM_MC_END();
3020 }
3021 else
3022 {
3023 /* memory operand */
3024 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3026 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3027
3028 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3030
3031 IEM_MC_LOCAL(uint64_t, u64Tmp);
3032 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3033
3034 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3035 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3036 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3037 IEM_MC_REF_EFLAGS(pEFlags);
3038 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3039 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3040
3041 IEM_MC_ADVANCE_RIP_AND_FINISH();
3042 IEM_MC_END();
3043 }
3044 break;
3045 }
3046
3047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3048 }
3049}
3050
3051
/**
 * @opcode 0x6a
 *
 * PUSH Ib: push a byte immediate, sign-extended to the effective operand
 * size (16/32/64-bit).  Not available on 8086/8088 (186+ only).  In 64-bit
 * mode the default operand size is 64-bit and there is no 32-bit variant.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm); /* sign-extend i8 -> u16 */
            IEM_MC_PUSH_U16(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm); /* sign-extend i8 -> u32 */
            IEM_MC_PUSH_U32(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm); /* sign-extend i8 -> u64 */
            IEM_MC_PUSH_U64(uValue);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3091
3092
/**
 * @opcode 0x6b
 *
 * IMUL Gv,Ev,Ib: three-operand signed multiply with a sign-extended byte
 * immediate.  The result is truncated to the operand size and written to the
 * ModRM reg operand; SF/ZF/AF/PF are left undefined by hardware (see the
 * IEMOP_VERIFICATION_UNDEFINED_EFLAGS below), CF/OF indicate overflow.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* EFLAGS behavior differs between CPU vendors, so pick the right worker. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);

                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte after the ModRM bytes */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte after the ModRM bytes */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extend Ib */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte after the ModRM bytes */

                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1); /* sign-extend here instead */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3254
3255
/**
 * @opcode 0x6c
 *
 * INS Yb,DX / REP INS Yb,DX: input byte(s) from the port in DX to ES:xDI.
 * Both F3 (REPZ) and F2 (REPNZ) prefixes select the REP variant.  The work
 * is deferred to a C implementation; the register mask passed to the deferral
 * macro names the guest GPRs the C code may dirty (xDI, plus xCX for REP).
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                            iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3307
3308
/**
 * @opcode 0x6d
 *
 * INS Yv,DX / REP INS Yv,DX: input word/dword(s) from the port in DX to
 * ES:xDI.  Dispatches on effective operand size, then on effective address
 * size.  Note that the 64-bit operand size case falls through to the op32
 * workers (there is no 64-bit I/O port operand size).  Both REPZ and REPNZ
 * select the REP variant.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                                    iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3413
3414
/**
 * @opcode 0x6e
 *
 * OUTS DX,Yb / REP OUTS DX,Yb: output byte(s) from iEffSeg:xSI to the port
 * in DX.  The effective segment (overridable) is passed as an extra argument
 * to the C implementation.  Both F3 and F2 prefixes select the REP variant;
 * the register mask names the GPRs the C code may dirty (xSI, plus xCX for
 * REP).
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                            iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3466
3467
/**
 * @opcode 0x6f
 *
 * OUTS DX,Yv / REP OUTS DX,Yv: output word/dword(s) from iEffSeg:xSI to the
 * port in DX.  Dispatches on effective operand size, then on effective
 * address size; the 64-bit operand size case falls through to the op32
 * workers (no 64-bit I/O port operand size).  Both REPZ and REPNZ select
 * the REP variant.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* 64-bit operand size uses the 32-bit workers. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
                                                    iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3572
3573
/**
 * @opcode 0x70
 *
 * JO rel8: jump short if overflow (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3592
3593
/**
 * @opcode 0x71
 *
 * JNO rel8: jump short if not overflow (OF=0).  Implemented with the
 * condition inverted: falls through when OF is set, jumps otherwise.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3612
/**
 * @opcode 0x72
 *
 * JC/JB/JNAE rel8: jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3631
3632
/**
 * @opcode 0x73
 *
 * JNC/JNB/JAE rel8: jump short if not carry (CF=0).  Implemented with the
 * condition inverted: falls through when CF is set, jumps otherwise.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3651
3652
/**
 * @opcode 0x74
 *
 * JE/JZ rel8: jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3671
3672
/**
 * @opcode 0x75
 *
 * JNE/JNZ rel8: jump short if not equal/not zero (ZF=0).  Implemented with
 * the condition inverted: falls through when ZF is set, jumps otherwise.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3691
3692
/**
 * @opcode 0x76
 *
 * JBE/JNA rel8: jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3711
3712
/**
 * @opcode 0x77
 *
 * JA/JNBE rel8: jump short if above (CF=0 and ZF=0).  Implemented with the
 * condition inverted: falls through when CF or ZF is set, jumps otherwise.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3731
3732
/**
 * @opcode 0x78
 *
 * JS rel8: jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3751
3752
/**
 * @opcode 0x79
 *
 * JNS rel8: jump short if not sign (SF=0).  Implemented with the condition
 * inverted: falls through when SF is set, jumps otherwise.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3771
3772
/**
 * @opcode 0x7a
 *
 * JP/JPE rel8: jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3791
3792
/**
 * @opcode 0x7b
 *
 * JNP/JPO rel8: jump short if parity odd (PF=0).  Implemented with the
 * condition inverted: falls through when PF is set, jumps otherwise.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3811
3812
/**
 * @opcode 0x7c
 *
 * JL/JNGE rel8: jump short if less, signed (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3831
3832
/**
 * @opcode 0x7d
 *
 * JGE/JNL rel8: jump short if greater or equal, signed (SF == OF).
 * Implemented with the condition inverted: falls through when SF != OF,
 * jumps otherwise.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3851
3852
/**
 * @opcode 0x7e
 *
 * JLE/JNG rel8: jump short if less or equal, signed (ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3871
3872
/**
 * @opcode 0x7f
 *
 * JG/JNLE rel8: jump short if greater, signed (ZF=0 and SF == OF).
 * Implemented with the condition inverted: falls through when ZF is set or
 * SF != OF, jumps otherwise.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
}
3891
3892
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register-target case and the non-locked memory-target case
 * (operand mapped read-write).  NOTE: this macro deliberately ends inside an
 * open 'else { ... }' scope for the locked memory-target case; it MUST be
 * followed by IEMOP_BODY_BINARY_Eb_Ib_LOCKED (or, for read-only/no-lock
 * forms, the corresponding macro) which supplies that case and closes the
 * braces.  For the register target the immediate is fetched before decoding
 * completes; for the memory target the effective address is calculated first
 * so the immediate byte offset (1) can be accounted for.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3943
/**
 * Locked memory-target variant completing IEMOP_BODY_BINARY_Eb_Ib_RW: same
 * map/call/commit sequence with the atomic worker, and closes the braces
 * left open by the _RW macro.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3967
/**
 * Body for byte group 1 instructions (Eb,Ib form) that only read the
 * destination (CMP): the memory operand is mapped read-only and only EFLAGS
 * are committed.
 *
 * Ends inside the "LOCK prefix present" else-branch of the memory-target
 * path; IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK must follow to raise \#UD for the
 * invalid LOCK prefix and close the braces.
 *
 * @param   a_fnNormalU8    The u8 worker function (must not write *pu8Dst).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4014
/**
 * Completion body for IEMOP_BODY_BINARY_Eb_Ib_RO: raises the invalid-lock
 * exception for a LOCK-prefixed memory target and closes the open braces.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4021
4022
4023
4024/**
4025 * @opmaps grp1_80,grp1_83
4026 * @opcode /0
4027 */
4028FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4029{
4030 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4031 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4032 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4033}
4034
4035
4036/**
4037 * @opmaps grp1_80,grp1_83
4038 * @opcode /1
4039 */
4040FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4041{
4042 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4043 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4044 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4045}
4046
4047
4048/**
4049 * @opmaps grp1_80,grp1_83
4050 * @opcode /2
4051 */
4052FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4053{
4054 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4055 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4056 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4057}
4058
4059
4060/**
4061 * @opmaps grp1_80,grp1_83
4062 * @opcode /3
4063 */
4064FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4065{
4066 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4067 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4068 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4069}
4070
4071
4072/**
4073 * @opmaps grp1_80,grp1_83
4074 * @opcode /4
4075 */
4076FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4077{
4078 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4079 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4080 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4081}
4082
4083
4084/**
4085 * @opmaps grp1_80,grp1_83
4086 * @opcode /5
4087 */
4088FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4089{
4090 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4091 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4092 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4093}
4094
4095
4096/**
4097 * @opmaps grp1_80,grp1_83
4098 * @opcode /6
4099 */
4100FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4101{
4102 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4103 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4104 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4105}
4106
4107
4108/**
4109 * @opmaps grp1_80,grp1_83
4110 * @opcode /7
4111 */
4112FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4113{
4114 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4115 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4116 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4117}
4118
4119
4120/**
4121 * @opcode 0x80
4122 */
4123FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4124{
4125 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4126 switch (IEM_GET_MODRM_REG_8(bRm))
4127 {
4128 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4129 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4130 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4131 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4132 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4133 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4134 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4135 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4136 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4137 }
4138}
4139
4140
4141/**
4142 * Body for a group 1 binary operator.
4143 */
4144#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4145 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4146 { \
4147 /* register target */ \
4148 switch (pVCpu->iem.s.enmEffOpSize) \
4149 { \
4150 case IEMMODE_16BIT: \
4151 { \
4152 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4153 IEM_MC_BEGIN(3, 0, 0, 0); \
4154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4155 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4156 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4157 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4158 \
4159 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4160 IEM_MC_REF_EFLAGS(pEFlags); \
4161 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4162 \
4163 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4164 IEM_MC_END(); \
4165 break; \
4166 } \
4167 \
4168 case IEMMODE_32BIT: \
4169 { \
4170 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4171 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4173 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4174 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4175 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4176 \
4177 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4178 IEM_MC_REF_EFLAGS(pEFlags); \
4179 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4180 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4181 \
4182 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4183 IEM_MC_END(); \
4184 break; \
4185 } \
4186 \
4187 case IEMMODE_64BIT: \
4188 { \
4189 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4190 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4191 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4192 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4193 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4194 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4195 \
4196 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4197 IEM_MC_REF_EFLAGS(pEFlags); \
4198 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4199 \
4200 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4201 IEM_MC_END(); \
4202 break; \
4203 } \
4204 \
4205 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4206 } \
4207 } \
4208 else \
4209 { \
4210 /* memory target */ \
4211 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4212 { \
4213 switch (pVCpu->iem.s.enmEffOpSize) \
4214 { \
4215 case IEMMODE_16BIT: \
4216 { \
4217 IEM_MC_BEGIN(3, 3, 0, 0); \
4218 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4219 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4220 \
4221 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4222 IEMOP_HLP_DONE_DECODING(); \
4223 \
4224 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4225 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4226 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4227 \
4228 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4229 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4230 IEM_MC_FETCH_EFLAGS(EFlags); \
4231 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4232 \
4233 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4234 IEM_MC_COMMIT_EFLAGS(EFlags); \
4235 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4236 IEM_MC_END(); \
4237 break; \
4238 } \
4239 \
4240 case IEMMODE_32BIT: \
4241 { \
4242 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4245 \
4246 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4247 IEMOP_HLP_DONE_DECODING(); \
4248 \
4249 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4250 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4251 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4252 \
4253 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4255 IEM_MC_FETCH_EFLAGS(EFlags); \
4256 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4257 \
4258 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4259 IEM_MC_COMMIT_EFLAGS(EFlags); \
4260 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4261 IEM_MC_END(); \
4262 break; \
4263 } \
4264 \
4265 case IEMMODE_64BIT: \
4266 { \
4267 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4268 \
4269 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4270 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4271 \
4272 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4273 IEMOP_HLP_DONE_DECODING(); \
4274 \
4275 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4276 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4277 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4278 \
4279 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4280 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4281 IEM_MC_FETCH_EFLAGS(EFlags); \
4282 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4283 \
4284 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4285 IEM_MC_COMMIT_EFLAGS(EFlags); \
4286 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4287 IEM_MC_END(); \
4288 break; \
4289 } \
4290 \
4291 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4292 } \
4293 } \
4294 else \
4295 { \
4296 (void)0
/*
 * Completion body for LOCK-prefixed Ev,Iz group 1 instructions: emits the
 * LOCK-prefixed memory path using the atomic workers and closes the braces
 * left open by IEMOP_BODY_BINARY_Ev_Iz_RW.
 *
 * This must be a separate macro due to parsing restrictions in
 * IEMAllInstPython.py.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    } \
    (void)0
4381
/*
 * Read-only version of IEMOP_BODY_BINARY_Ev_Iz_RW for operators that only
 * read the destination (CMP): memory is mapped read-only, only EFLAGS are
 * committed, and a LOCK prefix on a memory target raises the invalid-lock
 * exception inline (self-contained, no _LOCKED companion macro needed).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4538
4539
4540/**
4541 * @opmaps grp1_81
4542 * @opcode /0
4543 */
4544FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4545{
4546 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4547 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4548 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4549}
4550
4551
4552/**
4553 * @opmaps grp1_81
4554 * @opcode /1
4555 */
4556FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4557{
4558 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4559 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4560 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4561}
4562
4563
4564/**
4565 * @opmaps grp1_81
4566 * @opcode /2
4567 */
4568FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4569{
4570 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4571 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4572 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4573}
4574
4575
4576/**
4577 * @opmaps grp1_81
4578 * @opcode /3
4579 */
4580FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4581{
4582 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4583 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4584 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4585}
4586
4587
4588/**
4589 * @opmaps grp1_81
4590 * @opcode /4
4591 */
4592FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4593{
4594 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4595 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4596 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4597}
4598
4599
4600/**
4601 * @opmaps grp1_81
4602 * @opcode /5
4603 */
4604FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4605{
4606 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4607 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4608 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4609}
4610
4611
4612/**
4613 * @opmaps grp1_81
4614 * @opcode /6
4615 */
4616FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4617{
4618 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4619 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4620 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4621}
4622
4623
4624/**
4625 * @opmaps grp1_81
4626 * @opcode /7
4627 */
4628FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4629{
4630 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4631 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4632}
4633
4634
4635/**
4636 * @opcode 0x81
4637 */
4638FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4639{
4640 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4641 switch (IEM_GET_MODRM_REG_8(bRm))
4642 {
4643 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4644 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4645 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4646 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4647 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4648 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4649 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4650 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4652 }
4653}
4654
4655
4656/**
4657 * @opcode 0x82
4658 * @opmnemonic grp1_82
4659 * @opgroup og_groups
4660 */
4661FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4662{
4663 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4664 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4665}
4666
4667
4668/**
4669 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4670 * iemOp_Grp1_Ev_Ib.
4671 */
4672#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4673 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4674 { \
4675 /* \
4676 * Register target \
4677 */ \
4678 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4679 switch (pVCpu->iem.s.enmEffOpSize) \
4680 { \
4681 case IEMMODE_16BIT: \
4682 IEM_MC_BEGIN(3, 0, 0, 0); \
4683 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4684 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4685 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4686 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4687 \
4688 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4689 IEM_MC_REF_EFLAGS(pEFlags); \
4690 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4691 \
4692 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4693 IEM_MC_END(); \
4694 break; \
4695 \
4696 case IEMMODE_32BIT: \
4697 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4699 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4700 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4701 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4702 \
4703 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4704 IEM_MC_REF_EFLAGS(pEFlags); \
4705 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4706 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4707 \
4708 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4709 IEM_MC_END(); \
4710 break; \
4711 \
4712 case IEMMODE_64BIT: \
4713 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4715 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4716 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4717 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4718 \
4719 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4720 IEM_MC_REF_EFLAGS(pEFlags); \
4721 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4722 \
4723 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4724 IEM_MC_END(); \
4725 break; \
4726 \
4727 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4728 } \
4729 } \
4730 else \
4731 { \
4732 /* \
4733 * Memory target. \
4734 */ \
4735 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4736 { \
4737 switch (pVCpu->iem.s.enmEffOpSize) \
4738 { \
4739 case IEMMODE_16BIT: \
4740 IEM_MC_BEGIN(3, 3, 0, 0); \
4741 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4742 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4743 \
4744 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4745 IEMOP_HLP_DONE_DECODING(); \
4746 \
4747 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4748 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4749 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4750 \
4751 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4752 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4753 IEM_MC_FETCH_EFLAGS(EFlags); \
4754 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4755 \
4756 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4757 IEM_MC_COMMIT_EFLAGS(EFlags); \
4758 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4759 IEM_MC_END(); \
4760 break; \
4761 \
4762 case IEMMODE_32BIT: \
4763 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4766 \
4767 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4768 IEMOP_HLP_DONE_DECODING(); \
4769 \
4770 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4771 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4772 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4773 \
4774 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4775 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4776 IEM_MC_FETCH_EFLAGS(EFlags); \
4777 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4778 \
4779 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4780 IEM_MC_COMMIT_EFLAGS(EFlags); \
4781 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4782 IEM_MC_END(); \
4783 break; \
4784 \
4785 case IEMMODE_64BIT: \
4786 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4787 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4788 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4789 \
4790 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4791 IEMOP_HLP_DONE_DECODING(); \
4792 \
4793 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4794 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4795 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4796 \
4797 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4798 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4799 IEM_MC_FETCH_EFLAGS(EFlags); \
4800 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4801 \
4802 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4803 IEM_MC_COMMIT_EFLAGS(EFlags); \
4804 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4805 IEM_MC_END(); \
4806 break; \
4807 \
4808 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4809 } \
4810 } \
4811 else \
4812 { \
4813 (void)0
/*
 * Completion body for LOCK-prefixed Ev,Ib group 1 instructions: emits the
 * LOCK-prefixed memory path with the atomic workers (byte immediate
 * sign-extended to the operand size) and closes the braces left open by
 * IEMOP_BODY_BINARY_Ev_Ib_RW.
 *
 * Separate macro to work around parsing issue in IEMAllInstPython.py.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                IEMOP_HLP_DONE_DECODING(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    } \
    (void)0
4892
/* read-only variant */
/**
 * Instruction body for the group 1 (opcode 0x83) binary operators whose
 * destination is only read (i.e. CMP): the operand is sign-extended from the
 * byte immediate, the helper is invoked, and only EFLAGS is committed.
 *
 * Since nothing is written back, a LOCK prefix on the memory form is rejected
 * via IEMOP_RAISE_INVALID_LOCK_PREFIX_RET (register form uses the plain
 * no-lock-prefix decoding check).
 *
 * @param   a_fnNormalU16   Assembly helper for 16-bit operand size.
 * @param   a_fnNormalU32   Assembly helper for 32-bit operand size.
 * @param   a_fnNormalU64   Assembly helper for 64-bit operand size.
 *
 * Decoder context: expects a local @c bRm with the ModR/M byte already
 * fetched; the imm8 is fetched here (after effective address calculation in
 * the memory path — the '1' passed to IEM_MC_CALC_RM_EFF_ADDR accounts for
 * the immediate byte still outstanding).
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5039
/**
 * @opmaps grp1_83
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* The _RW body covers the register and non-locked memory forms; the
       _LOCKED body that follows completes it with the locked memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5050
5051
/**
 * @opmaps grp1_83
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* The _RW body covers the register and non-locked memory forms; the
       _LOCKED body that follows completes it with the locked memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5062
5063
/**
 * @opmaps grp1_83
 * @opcode /2
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* The _RW body covers the register and non-locked memory forms; the
       _LOCKED body that follows completes it with the locked memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5074
5075
/**
 * @opmaps grp1_83
 * @opcode /3
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* The _RW body covers the register and non-locked memory forms; the
       _LOCKED body that follows completes it with the locked memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5086
5087
/**
 * @opmaps grp1_83
 * @opcode /4
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* The _RW body covers the register and non-locked memory forms; the
       _LOCKED body that follows completes it with the locked memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5098
5099
/**
 * @opmaps grp1_83
 * @opcode /5
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* The _RW body covers the register and non-locked memory forms; the
       _LOCKED body that follows completes it with the locked memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5110
5111
/**
 * @opmaps grp1_83
 * @opcode /6
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* The _RW body covers the register and non-locked memory forms; the
       _LOCKED body that follows completes it with the locked memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5122
5123
/**
 * @opmaps grp1_83
 * @opcode /7
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* CMP only reads its destination, so the read-only body is used and there
       is no locked variant (the body rejects a LOCK prefix). */
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5133
5134
/**
 * @opcode 0x83
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Group 1 dispatcher: the reg field of the ModR/M byte selects which of
       the eight binary operators (add/or/adc/sbb/and/sub/xor/cmp) applies. */
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* reg is 3 bits, 0..7 is exhaustive */
    }
}
5157
5158
/**
 * @opcode 0x84
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is left undefined by TEST (see verification note below); the
       destination is only read, so there is no locked variant. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5169
5170
/**
 * @opcode 0x85
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is left undefined by TEST; read-only body, so no locked variant. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5180
5181
/**
 * @opcode 0x86
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register/register form: swap via two temporaries; no memory
           access, so no locking is involved (LOCK prefix is rejected). */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Shared body: map the memory byte R/W, exchange it with the register
           value via the given worker, then write the old memory value back to
           the register. */
#define IEMOP_XCHG_BYTE(a_fnWorker) \
        IEM_MC_BEGIN(2, 4, 0, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        IEM_MC_LOCAL(uint8_t, uTmpReg); \
        IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
        IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
        IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END()

        /* The locked worker is the default (XCHG with memory is implicitly
           atomic on real hardware, LOCK prefix or not); the unlocked one is
           only used when the VM is configured to disregard locking. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked);
        }
        else
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked);
        }
    }
}
5242
5243
/**
 * @opcode 0x87
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register/register form: swap via two temporaries per operand size;
           no memory access, so the LOCK prefix is rejected. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Shared body: map the memory operand R/W, exchange with the register
           value via the size-specific worker, then write the old memory value
           back to the register. */
#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64) \
    do { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 4, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_LOCAL(uint16_t, uTmpReg); \
                IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
                IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_LOCAL(uint32_t, uTmpReg); \
                IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_LOCAL(uint64_t, uTmpReg); \
                IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
        /* The locked workers are the default (XCHG with memory is implicitly
           atomic on real hardware, LOCK prefix or not); the unlocked ones are
           only used when the VM is configured to disregard locking. */
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked);
        }
        else
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked);
        }
    }
}
5389
5390
/**
 * @opcode 0x88
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register copy; LOCK prefix is rejected. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5430
5431
/**
 * @opcode 0x89
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register copy, one case per effective operand size;
           LOCK prefix is rejected. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5528
5529
/**
 * @opcode 0x8a
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register copy; LOCK prefix is rejected. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5568
5569
/**
 * @opcode 0x8b
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register to register copy, one case per effective operand size;
           LOCK prefix is rejected. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5666
5667
5668/**
5669 * opcode 0x63
5670 * @todo Table fixme
5671 */
5672FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5673{
5674 if (!IEM_IS_64BIT_CODE(pVCpu))
5675 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5676 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5677 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5678 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5679}
5680
5681
/**
 * @opcode 0x8c
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    /* Only the low 3 bits of reg select the segment register (ES..GS). */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5760
5761
5762
5763
/**
 * @opcode 0x8d
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    /* LEA stores the effective address itself (no memory access); for 16-bit
       and 32-bit operand sizes it is truncated to the operand size first. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* 64-bit: store the effective address as-is, no truncation needed. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5819
5820
/**
 * @opcode 0x8e
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The CIMPL call declares the selector, base and limit of the target
           segment register as clobbered so the recompiler flushes them. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* Loading SS inhibits interrupts for one instruction
           (IEM_CIMPL_F_INHIBIT_SHADOW); loads that can affect the effective
           mode additionally get IEM_CIMPL_F_MODE. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* Same IEM_CIMPL_F_XXX flag selection as for the register form. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5934
5935
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calculations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* 2 << 8: operand size in the high byte - presumably the rSP bias
               applied during the EA calc (see the Intel note above); confirm
               against IEM_MC_CALC_RM_EFF_ADDR. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary rSP copy so nothing is committed unless both the
       stack read and the memory store succeed. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6067
6068
/**
 * @opcode 0x8f
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exactly the XOP conformance checks kick in during
         *        instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP may not be combined with 66/F3/F2/F0 or any REX prefix. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* R, X, B and vvvv are stored inverted in the prefix bytes, hence the ~. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            /* All the opcode maps are still unimplemented stubs. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6131
6132
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Swaps rAX with the general register @a iReg at the current effective
 * operand size.  No flags are modified.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    /* Fold in REX.B so the 0x90+r encodings can reach r8..r15. */
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6183
6184
6185/**
6186 * @opcode 0x90
6187 */
6188FNIEMOP_DEF(iemOp_nop)
6189{
6190 /* R8/R8D and RAX/EAX can be exchanged. */
6191 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6192 {
6193 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6194 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6195 }
6196
6197 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6198 {
6199 IEMOP_MNEMONIC(pause, "pause");
6200 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6201 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6202 if (!IEM_IS_IN_GUEST(pVCpu))
6203 { /* probable */ }
6204#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6205 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6206 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6207#endif
6208#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6209 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6210 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6211#endif
6212 }
6213 else
6214 IEMOP_MNEMONIC(nop, "nop");
6215 /** @todo testcase: lock nop; lock pause */
6216 IEM_MC_BEGIN(0, 0, 0, 0);
6217 IEMOP_HLP_DONE_DECODING();
6218 IEM_MC_ADVANCE_RIP_AND_FINISH();
6219 IEM_MC_END();
6220}
6221
6222
/**
 * @opcode 0x91
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    /* The common worker ORs in uRexB, so with REX.B this becomes xchg r9,rAX. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6231
6232
/**
 * @opcode 0x92
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    /* The common worker ORs in uRexB, so with REX.B this becomes xchg r10,rAX. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6241
6242
/**
 * @opcode 0x93
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    /* The common worker ORs in uRexB, so with REX.B this becomes xchg r11,rAX. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6251
6252
6253/**
6254 * @opcode 0x94
6255 */
6256FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6257{
6258 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6259 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6260}
6261
6262
/**
 * @opcode 0x95
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    /* The common worker ORs in uRexB, so with REX.B this becomes xchg r13,rAX. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6271
6272
/**
 * @opcode 0x96
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    /* The common worker ORs in uRexB, so with REX.B this becomes xchg r14,rAX. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6281
6282
/**
 * @opcode 0x97
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    /* The common worker ORs in uRexB, so with REX.B this becomes xchg r15,rAX. */
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6291
6292
/**
 * @opcode 0x98
 */
FNIEMOP_DEF(iemOp_cbw)
{
    /* Sign-extend the lower half of rAX into the upper half by testing the
       sign bit and then OR-ing/AND-ing a constant mask - no flags touched. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6342
6343
/**
 * @opcode 0x99
 */
FNIEMOP_DEF(iemOp_cwd)
{
    /* Sign-extend rAX into rDX: rDX becomes all-ones or all-zeros depending
       on the sign bit of the operand-sized rAX. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6393
6394
/**
 * @opcode 0x9a
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT(); /* direct far call (ptr16:16/ptr16:32) is invalid in 64-bit mode */

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6416
6417
/** Opcode 0x9b. (aka fwait) */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Only checks for pending FPU exceptions / device-not-available; no
       other architectural effect beyond advancing RIP. */
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6429
6430
/**
 * @opcode 0x9c
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Deferred to CImpl; only rSP is written back from the guest-register set. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6442
6443
/**
 * @opcode 0x9d
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    /* Modifies rSP and rFLAGS; IRQ-window checks needed both before and
       after since popf may change IF. */
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6456
6457
/**
 * @opcode 0x9e
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    /* In 64-bit mode sahf/lahf exist only when CPUID reports LahfSahf. */
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* GREG index 4 without REX is AH. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Only SF/ZF/AF/PF/CF are loaded from AH; bit 1 is architecturally
       always one; the upper flag bits are preserved. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6481
6482
/**
 * @opcode 0x9f
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    /* In 64-bit mode sahf/lahf exist only when CPUID reports LahfSahf. */
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* GREG index 4 without REX is AH. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6500
6501
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The immediate width follows the effective address mode (16/32/64 bits)
 * and the value is zero extended to 64 bits.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6525
/**
 * @opcode 0xa0
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    /* Loads AL from iEffSeg:moffs; no ModR/M byte, the address is an
       immediate in the instruction stream. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6550
6551
/**
 * @opcode 0xa1
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    /* Loads the operand-sized rAX from iEffSeg:moffs. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6605
6606
/**
 * @opcode 0xa2
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    /* Stores AL to iEffSeg:moffs - the mirror image of opcode 0xa0. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6631
6632
/**
 * @opcode 0xa3
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    /* Stores the operand-sized rAX to iEffSeg:moffs - mirror of opcode 0xa1. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6686
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *  Emits one non-repeated MOVS step: load ValBits from iEffSeg:rSI, store to
 *  ES:rDI, then step both index registers by ValBits/8 - down if EFLAGS.DF
 *  is set, up otherwise. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6706
/**
 * @opcode 0xa4
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    /* F3/F2 both act as REP for movs; the worker is picked by address size
       and returns directly from each case. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6755
6756
/**
 * @opcode 0xa5
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    /* Worker selection is operand size x address size; every inner case
       returns via IEM_MC_DEFER_TO_CIMPL_1_RET or the not-reached default,
       so the missing breaks between the outer cases are unreachable. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op size requires 64-bit mode, where addr16 cannot be encoded */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}

#undef IEM_MOVS_CASE
6882
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *  Emits one non-repeated CMPS step: load ValBits operands from iEffSeg:rSI
 *  and ES:rDI, run the cmp worker to set the arithmetic flags, then step
 *  both index registers by ValBits/8 according to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr1); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr2); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
    \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6912
/**
 * @opcode 0xa6
 *
 * CMPSB - compare the byte at [iEffSeg:xSI] with the byte at [ES:xDI].
 * With a REPE or REPNE prefix the whole loop is deferred to a C helper
 * (one per addressing mode); without a prefix a single iteration is
 * emitted via IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Every case returns (the DEFER macros return), so no breaks needed. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6989
6990
/**
 * @opcode 0xa7
 *
 * CMPSW/CMPSD/CMPSQ - compare the word/dword/qword at [iEffSeg:xSI] with
 * the one at [ES:xDI].  REPE/REPNE variants defer to C helpers selected by
 * operand size x address size; the plain variant emits one iteration via
 * IEM_CMPS_CASE.
 */
FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* All inner cases return, so the missing breaks after some inner
           switches are unreachable anyway. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* 64-bit operand size implies 64-bit mode, where 16-bit
                       addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with cmpsb.
     */
    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break; /* NOTE(review): siblings use IEM_MC_F_64BIT here - confirm. */
                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7188
7189#undef IEM_CMPS_CASE
7190
/**
 * @opcode 0xa8
 *
 * TEST AL,imm8 - AND without result, setting only EFLAGS.  AF is
 * architecturally undefined for TEST, hence the verification exclusion.
 */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
}
7200
7201
/**
 * @opcode 0xa9
 *
 * TEST rAX,immz - operand-size dependent variant of TEST against the
 * accumulator; dispatches to the u16/u32/u64 workers.  AF is
 * architecturally undefined for TEST, hence the verification exclusion.
 */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
}
7211
7212
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the body for a single (non-REP) STOS iteration: stores xAX (at the
 * given operand width) to [ES:xDI], then decrements or increments xDI by the
 * operand size depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN (CPU/mode limits).
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

/**
 * @opcode 0xaa
 *
 * STOSB - store AL to [ES:xDI].  REP variants (REPZ and REPNZ behave
 * identically for STOS) defer to a C helper per addressing mode; otherwise
 * a single iteration is emitted via IEM_STOS_CASE.
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Every case returns (the DEFER macros return), so no breaks needed. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m16);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m32);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7275
7276
7277/**
7278 * @opcode 0xab
7279 */
7280FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7281{
7282 /*
7283 * Use the C implementation if a repeat prefix is encountered.
7284 */
7285 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7286 {
7287 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7289 switch (pVCpu->iem.s.enmEffOpSize)
7290 {
7291 case IEMMODE_16BIT:
7292 switch (pVCpu->iem.s.enmEffAddrMode)
7293 {
7294 case IEMMODE_16BIT:
7295 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7296 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7297 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7298 iemCImpl_stos_ax_m16);
7299 case IEMMODE_32BIT:
7300 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7301 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7302 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7303 iemCImpl_stos_ax_m32);
7304 case IEMMODE_64BIT:
7305 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7306 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7307 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7308 iemCImpl_stos_ax_m64);
7309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7310 }
7311 break;
7312 case IEMMODE_32BIT:
7313 switch (pVCpu->iem.s.enmEffAddrMode)
7314 {
7315 case IEMMODE_16BIT:
7316 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7317 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7318 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7319 iemCImpl_stos_eax_m16);
7320 case IEMMODE_32BIT:
7321 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7322 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7323 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7324 iemCImpl_stos_eax_m32);
7325 case IEMMODE_64BIT:
7326 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7327 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7328 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7329 iemCImpl_stos_eax_m64);
7330 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7331 }
7332 case IEMMODE_64BIT:
7333 switch (pVCpu->iem.s.enmEffAddrMode)
7334 {
7335 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7336 case IEMMODE_32BIT:
7337 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7338 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7339 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7340 iemCImpl_stos_rax_m32);
7341 case IEMMODE_64BIT:
7342 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7343 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7344 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7345 iemCImpl_stos_rax_m64);
7346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7347 }
7348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7349 }
7350 }
7351
7352 /*
7353 * Annoying double switch here.
7354 * Using ugly macro for implementing the cases, sharing it with stosb.
7355 */
7356 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7357 switch (pVCpu->iem.s.enmEffOpSize)
7358 {
7359 case IEMMODE_16BIT:
7360 switch (pVCpu->iem.s.enmEffAddrMode)
7361 {
7362 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7363 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7364 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7366 }
7367 break;
7368
7369 case IEMMODE_32BIT:
7370 switch (pVCpu->iem.s.enmEffAddrMode)
7371 {
7372 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7373 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7374 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7376 }
7377 break;
7378
7379 case IEMMODE_64BIT:
7380 switch (pVCpu->iem.s.enmEffAddrMode)
7381 {
7382 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7383 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7384 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7386 }
7387 break;
7388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7389 }
7390}
7391
7392#undef IEM_STOS_CASE
7393
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the body for a single (non-REP) LODS iteration: loads the value at
 * [iEffSeg:xSI] into xAX (at the given operand width), then decrements or
 * increments xSI by the operand size depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN (CPU/mode limits).
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \

7410
/**
 * @opcode 0xac
 *
 * LODSB - load AL from [iEffSeg:xSI].  REP variants (REPZ and REPNZ behave
 * identically for LODS) defer to a C helper per addressing mode; otherwise
 * a single iteration is emitted via IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Every case returns (the DEFER macros return), so no breaks needed. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7459
7460
/**
 * @opcode 0xad
 *
 * LODSW/LODSD/LODSQ - load rAX (at the effective operand size) from
 * [iEffSeg:xSI].  REP variants (REPZ and REPNZ behave identically for LODS)
 * defer to a C helper selected by operand size x address size; the plain
 * variant emits a single iteration via IEM_LODS_CASE.
 */
FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* All inner cases return, so the missing breaks after some inner
           switches are unreachable anyway. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    /* 64-bit operand size implies 64-bit mode, where 16-bit
                       addressing cannot be encoded. */
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with lodsb.
     */
    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7583
7584#undef IEM_LODS_CASE
7585
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the body for a single (non-REP) SCAS iteration: compares xAX (at
 * the given operand width, by reference) against the value at [ES:xDI] via
 * iemAImpl_cmp_uNN (updating EFLAGS like CMP), then decrements or increments
 * xDI by the operand size depending on EFLAGS.DF.
 *
 * @param   ValBits     Operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    Effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags for IEM_MC_BEGIN (CPU/mode limits).
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();

7608
7609/**
7610 * @opcode 0xae
7611 */
7612FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7613{
7614 /*
7615 * Use the C implementation if a repeat prefix is encountered.
7616 */
7617 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7618 {
7619 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7620 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7621 switch (pVCpu->iem.s.enmEffAddrMode)
7622 {
7623 case IEMMODE_16BIT:
7624 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7625 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7626 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7627 iemCImpl_repe_scas_al_m16);
7628 case IEMMODE_32BIT:
7629 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7630 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7631 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7632 iemCImpl_repe_scas_al_m32);
7633 case IEMMODE_64BIT:
7634 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7635 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7636 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7637 iemCImpl_repe_scas_al_m64);
7638 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7639 }
7640 }
7641 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7642 {
7643 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7645 switch (pVCpu->iem.s.enmEffAddrMode)
7646 {
7647 case IEMMODE_16BIT:
7648 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7649 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7650 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7651 iemCImpl_repne_scas_al_m16);
7652 case IEMMODE_32BIT:
7653 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7654 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7655 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7656 iemCImpl_repne_scas_al_m32);
7657 case IEMMODE_64BIT:
7658 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7659 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7660 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7661 iemCImpl_repne_scas_al_m64);
7662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7663 }
7664 }
7665
7666 /*
7667 * Sharing case implementation with stos[wdq] below.
7668 */
7669 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7670 switch (pVCpu->iem.s.enmEffAddrMode)
7671 {
7672 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7673 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7674 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7675 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7676 }
7677}
7678
7679
7680/**
7681 * @opcode 0xaf
7682 */
7683FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7684{
7685 /*
7686 * Use the C implementation if a repeat prefix is encountered.
7687 */
7688 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7689 {
7690 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7692 switch (pVCpu->iem.s.enmEffOpSize)
7693 {
7694 case IEMMODE_16BIT:
7695 switch (pVCpu->iem.s.enmEffAddrMode)
7696 {
7697 case IEMMODE_16BIT:
7698 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7699 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7700 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7701 iemCImpl_repe_scas_ax_m16);
7702 case IEMMODE_32BIT:
7703 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7704 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7705 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7706 iemCImpl_repe_scas_ax_m32);
7707 case IEMMODE_64BIT:
7708 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7709 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7710 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7711 iemCImpl_repe_scas_ax_m64);
7712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7713 }
7714 break;
7715 case IEMMODE_32BIT:
7716 switch (pVCpu->iem.s.enmEffAddrMode)
7717 {
7718 case IEMMODE_16BIT:
7719 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7720 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7721 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7722 iemCImpl_repe_scas_eax_m16);
7723 case IEMMODE_32BIT:
7724 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7725 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7726 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7727 iemCImpl_repe_scas_eax_m32);
7728 case IEMMODE_64BIT:
7729 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7730 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7731 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7732 iemCImpl_repe_scas_eax_m64);
7733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7734 }
7735 case IEMMODE_64BIT:
7736 switch (pVCpu->iem.s.enmEffAddrMode)
7737 {
7738 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7739 case IEMMODE_32BIT:
7740 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7741 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7742 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7743 iemCImpl_repe_scas_rax_m32);
7744 case IEMMODE_64BIT:
7745 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7746 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7747 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7748 iemCImpl_repe_scas_rax_m64);
7749 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7750 }
7751 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7752 }
7753 }
7754 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7755 {
7756 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7758 switch (pVCpu->iem.s.enmEffOpSize)
7759 {
7760 case IEMMODE_16BIT:
7761 switch (pVCpu->iem.s.enmEffAddrMode)
7762 {
7763 case IEMMODE_16BIT:
7764 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7765 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7766 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7767 iemCImpl_repne_scas_ax_m16);
7768 case IEMMODE_32BIT:
7769 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7770 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7771 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7772 iemCImpl_repne_scas_ax_m32);
7773 case IEMMODE_64BIT:
7774 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7775 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7776 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7777 iemCImpl_repne_scas_ax_m64);
7778 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7779 }
7780 break;
7781 case IEMMODE_32BIT:
7782 switch (pVCpu->iem.s.enmEffAddrMode)
7783 {
7784 case IEMMODE_16BIT:
7785 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7786 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7787 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7788 iemCImpl_repne_scas_eax_m16);
7789 case IEMMODE_32BIT:
7790 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7791 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7792 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7793 iemCImpl_repne_scas_eax_m32);
7794 case IEMMODE_64BIT:
7795 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7796 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7797 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7798 iemCImpl_repne_scas_eax_m64);
7799 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7800 }
7801 case IEMMODE_64BIT:
7802 switch (pVCpu->iem.s.enmEffAddrMode)
7803 {
7804 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7805 case IEMMODE_32BIT:
7806 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7807 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7808 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7809 iemCImpl_repne_scas_rax_m32);
7810 case IEMMODE_64BIT:
7811 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7812 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7813 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7814 iemCImpl_repne_scas_rax_m64);
7815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7816 }
7817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7818 }
7819 }
7820
7821 /*
7822 * Annoying double switch here.
7823 * Using ugly macro for implementing the cases, sharing it with scasb.
7824 */
7825 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7826 switch (pVCpu->iem.s.enmEffOpSize)
7827 {
7828 case IEMMODE_16BIT:
7829 switch (pVCpu->iem.s.enmEffAddrMode)
7830 {
7831 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7832 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7833 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7834 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7835 }
7836 break;
7837
7838 case IEMMODE_32BIT:
7839 switch (pVCpu->iem.s.enmEffAddrMode)
7840 {
7841 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7842 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7843 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7844 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7845 }
7846 break;
7847
7848 case IEMMODE_64BIT:
7849 switch (pVCpu->iem.s.enmEffAddrMode)
7850 {
7851 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7852 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7853 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7854 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7855 }
7856 break;
7857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7858 }
7859}
7860
7861#undef IEM_SCAS_CASE
7862
7863/**
7864 * Common 'mov r8, imm8' helper.
7865 */
7866FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7867{
7868 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7869 IEM_MC_BEGIN(0, 0, 0, 0);
7870 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7871 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7872 IEM_MC_ADVANCE_RIP_AND_FINISH();
7873 IEM_MC_END();
7874}
7875
7876
7877/**
7878 * @opcode 0xb0
7879 */
7880FNIEMOP_DEF(iemOp_mov_AL_Ib)
7881{
7882 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7883 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7884}
7885
7886
7887/**
7888 * @opcode 0xb1
7889 */
7890FNIEMOP_DEF(iemOp_CL_Ib)
7891{
7892 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7893 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7894}
7895
7896
7897/**
7898 * @opcode 0xb2
7899 */
7900FNIEMOP_DEF(iemOp_DL_Ib)
7901{
7902 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7903 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7904}
7905
7906
7907/**
7908 * @opcode 0xb3
7909 */
7910FNIEMOP_DEF(iemOp_BL_Ib)
7911{
7912 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7913 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7914}
7915
7916
7917/**
7918 * @opcode 0xb4
7919 */
7920FNIEMOP_DEF(iemOp_mov_AH_Ib)
7921{
7922 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7923 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7924}
7925
7926
7927/**
7928 * @opcode 0xb5
7929 */
7930FNIEMOP_DEF(iemOp_CH_Ib)
7931{
7932 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7933 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7934}
7935
7936
7937/**
7938 * @opcode 0xb6
7939 */
7940FNIEMOP_DEF(iemOp_DH_Ib)
7941{
7942 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7943 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7944}
7945
7946
7947/**
7948 * @opcode 0xb7
7949 */
7950FNIEMOP_DEF(iemOp_BH_Ib)
7951{
7952 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7953 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7954}
7955
7956
7957/**
7958 * Common 'mov regX,immX' helper.
7959 */
7960FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7961{
7962 switch (pVCpu->iem.s.enmEffOpSize)
7963 {
7964 case IEMMODE_16BIT:
7965 IEM_MC_BEGIN(0, 0, 0, 0);
7966 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7967 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7968 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
7969 IEM_MC_ADVANCE_RIP_AND_FINISH();
7970 IEM_MC_END();
7971 break;
7972
7973 case IEMMODE_32BIT:
7974 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7975 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
7976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7977 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
7978 IEM_MC_ADVANCE_RIP_AND_FINISH();
7979 IEM_MC_END();
7980 break;
7981
7982 case IEMMODE_64BIT:
7983 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
7984 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
7985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7986 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
7987 IEM_MC_ADVANCE_RIP_AND_FINISH();
7988 IEM_MC_END();
7989 break;
7990 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7991 }
7992}
7993
7994
7995/**
7996 * @opcode 0xb8
7997 */
7998FNIEMOP_DEF(iemOp_eAX_Iv)
7999{
8000 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8001 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8002}
8003
8004
8005/**
8006 * @opcode 0xb9
8007 */
8008FNIEMOP_DEF(iemOp_eCX_Iv)
8009{
8010 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8011 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8012}
8013
8014
8015/**
8016 * @opcode 0xba
8017 */
8018FNIEMOP_DEF(iemOp_eDX_Iv)
8019{
8020 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8021 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8022}
8023
8024
8025/**
8026 * @opcode 0xbb
8027 */
8028FNIEMOP_DEF(iemOp_eBX_Iv)
8029{
8030 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8031 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8032}
8033
8034
8035/**
8036 * @opcode 0xbc
8037 */
8038FNIEMOP_DEF(iemOp_eSP_Iv)
8039{
8040 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8041 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8042}
8043
8044
8045/**
8046 * @opcode 0xbd
8047 */
8048FNIEMOP_DEF(iemOp_eBP_Iv)
8049{
8050 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8051 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8052}
8053
8054
8055/**
8056 * @opcode 0xbe
8057 */
8058FNIEMOP_DEF(iemOp_eSI_Iv)
8059{
8060 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8061 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8062}
8063
8064
8065/**
8066 * @opcode 0xbf
8067 */
8068FNIEMOP_DEF(iemOp_eDI_Iv)
8069{
8070 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8071 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8072}
8073
8074
8075/**
8076 * @opcode 0xc0
8077 */
8078FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8079{
8080 IEMOP_HLP_MIN_186();
8081 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8082 PCIEMOPSHIFTSIZES pImpl;
8083 switch (IEM_GET_MODRM_REG_8(bRm))
8084 {
8085 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
8086 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
8087 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
8088 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
8089 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
8090 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
8091 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
8092 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8093 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8094 }
8095 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8096
8097 if (IEM_IS_MODRM_REG_MODE(bRm))
8098 {
8099 /* register */
8100 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8101 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8103 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8104 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8105 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8106 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8107 IEM_MC_REF_EFLAGS(pEFlags);
8108 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8109 IEM_MC_ADVANCE_RIP_AND_FINISH();
8110 IEM_MC_END();
8111 }
8112 else
8113 {
8114 /* memory */
8115 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
8116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8118
8119 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8121
8122 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8123 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8124 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8125
8126 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8127 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8128 IEM_MC_FETCH_EFLAGS(EFlags);
8129 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8130
8131 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8132 IEM_MC_COMMIT_EFLAGS(EFlags);
8133 IEM_MC_ADVANCE_RIP_AND_FINISH();
8134 IEM_MC_END();
8135 }
8136}
8137
8138
8139/**
8140 * @opcode 0xc1
8141 */
8142FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8143{
8144 IEMOP_HLP_MIN_186();
8145 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8146 PCIEMOPSHIFTSIZES pImpl;
8147 switch (IEM_GET_MODRM_REG_8(bRm))
8148 {
8149 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
8150 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
8151 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
8152 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
8153 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
8154 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
8155 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
8156 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8157 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8158 }
8159 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8160
8161 if (IEM_IS_MODRM_REG_MODE(bRm))
8162 {
8163 /* register */
8164 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8165 switch (pVCpu->iem.s.enmEffOpSize)
8166 {
8167 case IEMMODE_16BIT:
8168 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
8169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8170 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8171 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8172 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8173 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8174 IEM_MC_REF_EFLAGS(pEFlags);
8175 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8176 IEM_MC_ADVANCE_RIP_AND_FINISH();
8177 IEM_MC_END();
8178 break;
8179
8180 case IEMMODE_32BIT:
8181 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8183 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8184 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8185 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8186 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8187 IEM_MC_REF_EFLAGS(pEFlags);
8188 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8189 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8190 IEM_MC_ADVANCE_RIP_AND_FINISH();
8191 IEM_MC_END();
8192 break;
8193
8194 case IEMMODE_64BIT:
8195 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8197 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8198 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8199 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8200 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8201 IEM_MC_REF_EFLAGS(pEFlags);
8202 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8203 IEM_MC_ADVANCE_RIP_AND_FINISH();
8204 IEM_MC_END();
8205 break;
8206
8207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8208 }
8209 }
8210 else
8211 {
8212 /* memory */
8213 switch (pVCpu->iem.s.enmEffOpSize)
8214 {
8215 case IEMMODE_16BIT:
8216 IEM_MC_BEGIN(3, 3, 0, 0);
8217 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8218 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8219
8220 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8222
8223 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8224 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8225 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8226
8227 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8228 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8229 IEM_MC_FETCH_EFLAGS(EFlags);
8230 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8231
8232 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8233 IEM_MC_COMMIT_EFLAGS(EFlags);
8234 IEM_MC_ADVANCE_RIP_AND_FINISH();
8235 IEM_MC_END();
8236 break;
8237
8238 case IEMMODE_32BIT:
8239 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8240 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8241 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8242
8243 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8244 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8245
8246 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8247 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8248 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8249
8250 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8251 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8252 IEM_MC_FETCH_EFLAGS(EFlags);
8253 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8254
8255 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8256 IEM_MC_COMMIT_EFLAGS(EFlags);
8257 IEM_MC_ADVANCE_RIP_AND_FINISH();
8258 IEM_MC_END();
8259 break;
8260
8261 case IEMMODE_64BIT:
8262 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8264 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8265
8266 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
8267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8268
8269 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8270 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8271 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8272
8273 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
8274 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
8275 IEM_MC_FETCH_EFLAGS(EFlags);
8276 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8277
8278 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8279 IEM_MC_COMMIT_EFLAGS(EFlags);
8280 IEM_MC_ADVANCE_RIP_AND_FINISH();
8281 IEM_MC_END();
8282 break;
8283
8284 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8285 }
8286 }
8287}
8288
8289
8290/**
8291 * @opcode 0xc2
8292 */
8293FNIEMOP_DEF(iemOp_retn_Iw)
8294{
8295 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8296 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8297 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8298 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8299 switch (pVCpu->iem.s.enmEffOpSize)
8300 {
8301 case IEMMODE_16BIT:
8302 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_16, u16Imm);
8303 case IEMMODE_32BIT:
8304 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_32, u16Imm);
8305 case IEMMODE_64BIT:
8306 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_64, u16Imm);
8307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8308 }
8309}
8310
8311
8312/**
8313 * @opcode 0xc3
8314 */
8315FNIEMOP_DEF(iemOp_retn)
8316{
8317 IEMOP_MNEMONIC(retn, "retn");
8318 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8320 switch (pVCpu->iem.s.enmEffOpSize)
8321 {
8322 case IEMMODE_16BIT:
8323 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_16);
8324 case IEMMODE_32BIT:
8325 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_32);
8326 case IEMMODE_64BIT:
8327 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_64);
8328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8329 }
8330}
8331
8332
8333/**
8334 * @opcode 0xc4
8335 */
8336FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
8337{
8338 /* The LDS instruction is invalid 64-bit mode. In legacy and
8339 compatability mode it is invalid with MOD=3.
8340 The use as a VEX prefix is made possible by assigning the inverted
8341 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
8342 outside of 64-bit mode. VEX is not available in real or v86 mode. */
8343 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8344 if ( IEM_IS_64BIT_CODE(pVCpu)
8345 || IEM_IS_MODRM_REG_MODE(bRm) )
8346 {
8347 IEMOP_MNEMONIC(vex3_prefix, "vex3");
8348 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8349 {
8350 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8351 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8352 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
8353 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8354 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8355 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
8356 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
8357 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8358 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
8359 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
8360 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
8361 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
8362 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
8363
8364 switch (bRm & 0x1f)
8365 {
8366 case 1: /* 0x0f lead opcode byte. */
8367#ifdef IEM_WITH_VEX
8368 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8369#else
8370 IEMOP_BITCH_ABOUT_STUB();
8371 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8372#endif
8373
8374 case 2: /* 0x0f 0x38 lead opcode bytes. */
8375#ifdef IEM_WITH_VEX
8376 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8377#else
8378 IEMOP_BITCH_ABOUT_STUB();
8379 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8380#endif
8381
8382 case 3: /* 0x0f 0x3a lead opcode bytes. */
8383#ifdef IEM_WITH_VEX
8384 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8385#else
8386 IEMOP_BITCH_ABOUT_STUB();
8387 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8388#endif
8389
8390 default:
8391 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
8392 IEMOP_RAISE_INVALID_OPCODE_RET();
8393 }
8394 }
8395 Log(("VEX3: VEX support disabled!\n"));
8396 IEMOP_RAISE_INVALID_OPCODE_RET();
8397 }
8398
8399 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
8400 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
8401}
8402
8403
8404/**
8405 * @opcode 0xc5
8406 */
8407FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
8408{
8409 /* The LES instruction is invalid 64-bit mode. In legacy and
8410 compatability mode it is invalid with MOD=3.
8411 The use as a VEX prefix is made possible by assigning the inverted
8412 REX.R to the top MOD bit, and the top bit in the inverted register
8413 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
8414 to accessing registers 0..7 in this VEX form. */
8415 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8416 if ( IEM_IS_64BIT_CODE(pVCpu)
8417 || IEM_IS_MODRM_REG_MODE(bRm))
8418 {
8419 IEMOP_MNEMONIC(vex2_prefix, "vex2");
8420 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8421 {
8422 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8423 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8424 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8425 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8426 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8427 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
8428 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
8429 pVCpu->iem.s.idxPrefix = bRm & 0x3;
8430
8431#ifdef IEM_WITH_VEX
8432 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8433#else
8434 IEMOP_BITCH_ABOUT_STUB();
8435 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8436#endif
8437 }
8438
8439 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8440 Log(("VEX2: VEX support disabled!\n"));
8441 IEMOP_RAISE_INVALID_OPCODE_RET();
8442 }
8443
8444 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8445 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8446}
8447
8448
8449/**
8450 * @opcode 0xc6
8451 */
8452FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8453{
8454 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8455 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8456 IEMOP_RAISE_INVALID_OPCODE_RET();
8457 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8458
8459 if (IEM_IS_MODRM_REG_MODE(bRm))
8460 {
8461 /* register access */
8462 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8463 IEM_MC_BEGIN(0, 0, 0, 0);
8464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8465 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8466 IEM_MC_ADVANCE_RIP_AND_FINISH();
8467 IEM_MC_END();
8468 }
8469 else
8470 {
8471 /* memory access. */
8472 IEM_MC_BEGIN(0, 1, 0, 0);
8473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8474 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8475 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8477 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8478 IEM_MC_ADVANCE_RIP_AND_FINISH();
8479 IEM_MC_END();
8480 }
8481}
8482
8483
8484/**
8485 * @opcode 0xc7
8486 */
8487FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8488{
8489 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8490 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
8491 IEMOP_RAISE_INVALID_OPCODE_RET();
8492 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8493
8494 if (IEM_IS_MODRM_REG_MODE(bRm))
8495 {
8496 /* register access */
8497 switch (pVCpu->iem.s.enmEffOpSize)
8498 {
8499 case IEMMODE_16BIT:
8500 IEM_MC_BEGIN(0, 0, 0, 0);
8501 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8503 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8504 IEM_MC_ADVANCE_RIP_AND_FINISH();
8505 IEM_MC_END();
8506 break;
8507
8508 case IEMMODE_32BIT:
8509 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8510 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8512 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8513 IEM_MC_ADVANCE_RIP_AND_FINISH();
8514 IEM_MC_END();
8515 break;
8516
8517 case IEMMODE_64BIT:
8518 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8519 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8521 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8522 IEM_MC_ADVANCE_RIP_AND_FINISH();
8523 IEM_MC_END();
8524 break;
8525
8526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8527 }
8528 }
8529 else
8530 {
8531 /* memory access. */
8532 switch (pVCpu->iem.s.enmEffOpSize)
8533 {
8534 case IEMMODE_16BIT:
8535 IEM_MC_BEGIN(0, 1, 0, 0);
8536 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8538 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8540 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8541 IEM_MC_ADVANCE_RIP_AND_FINISH();
8542 IEM_MC_END();
8543 break;
8544
8545 case IEMMODE_32BIT:
8546 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8548 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8549 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8551 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8552 IEM_MC_ADVANCE_RIP_AND_FINISH();
8553 IEM_MC_END();
8554 break;
8555
8556 case IEMMODE_64BIT:
8557 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8558 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8560 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8562 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8563 IEM_MC_ADVANCE_RIP_AND_FINISH();
8564 IEM_MC_END();
8565 break;
8566
8567 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8568 }
8569 }
8570}
8571
8572
8573
8574
8575/**
8576 * @opcode 0xc8
8577 */
8578FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8579{
8580 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8581 IEMOP_HLP_MIN_186();
8582 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8583 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8584 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8586 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
8587 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8588 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8589 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8590}
8591
8592
8593/**
8594 * @opcode 0xc9
8595 */
8596FNIEMOP_DEF(iemOp_leave)
8597{
8598 IEMOP_MNEMONIC(leave, "leave");
8599 IEMOP_HLP_MIN_186();
8600 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8602 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
8603 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8604 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8605 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8606}
8607
8608
8609/**
8610 * @opcode 0xca
8611 */
8612FNIEMOP_DEF(iemOp_retf_Iw)
8613{
8614 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8615 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8617 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8618 | IEM_CIMPL_F_MODE,
8619 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8620 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8621 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8622 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8623 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8624 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8625 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8626 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8627 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8628 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8629 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8630 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8631 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8632}
8633
8634
8635/**
8636 * @opcode 0xcb
8637 */
8638FNIEMOP_DEF(iemOp_retf)
8639{
8640 IEMOP_MNEMONIC(retf, "retf");
8641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8642 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8643 | IEM_CIMPL_F_MODE,
8644 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8645 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8646 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8647 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8648 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8649 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8650 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8651 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8652 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8653 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8654 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8655 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8656 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8657}
8658
8659
8660/**
8661 * @opcode 0xcc
8662 */
8663FNIEMOP_DEF(iemOp_int3)
8664{
8665 IEMOP_MNEMONIC(int3, "int3");
8666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8667 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8668 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
8669 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8670}
8671
8672
8673/**
8674 * @opcode 0xcd
8675 */
8676FNIEMOP_DEF(iemOp_int_Ib)
8677{
8678 IEMOP_MNEMONIC(int_Ib, "int Ib");
8679 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8680 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8681 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8682 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
8683 iemCImpl_int, u8Int, IEMINT_INTN);
8684 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8685}
8686
8687
8688/**
8689 * @opcode 0xce
8690 */
8691FNIEMOP_DEF(iemOp_into)
8692{
8693 IEMOP_MNEMONIC(into, "into");
8694 IEMOP_HLP_NO_64BIT();
8695 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8696 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8697 UINT64_MAX,
8698 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8699 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8700}
8701
8702
8703/**
8704 * @opcode 0xcf
8705 */
8706FNIEMOP_DEF(iemOp_iret)
8707{
8708 IEMOP_MNEMONIC(iret, "iret");
8709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8710 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8711 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8712 RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8713 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8714 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8715 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8716 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8717 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8718 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8719 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8720 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8721 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8722 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8723 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
8724 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8725 /* Segment registers are sanitized when returning to an outer ring, or fully
8726 reloaded when returning to v86 mode. Thus the large flush list above. */
8727}
8728
8729
8730/**
8731 * @opcode 0xd0
8732 */
8733FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8734{
8735 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8736 PCIEMOPSHIFTSIZES pImpl;
8737 switch (IEM_GET_MODRM_REG_8(bRm))
8738 {
8739 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
8740 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
8741 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
8742 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
8743 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
8744 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
8745 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
8746 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8747 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8748 }
8749 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8750
8751 if (IEM_IS_MODRM_REG_MODE(bRm))
8752 {
8753 /* register */
8754 IEM_MC_BEGIN(3, 0, 0, 0);
8755 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8756 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8757 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8758 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8759 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8760 IEM_MC_REF_EFLAGS(pEFlags);
8761 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8762 IEM_MC_ADVANCE_RIP_AND_FINISH();
8763 IEM_MC_END();
8764 }
8765 else
8766 {
8767 /* memory */
8768 IEM_MC_BEGIN(3, 3, 0, 0);
8769 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8770 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
8771 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8772 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8773 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8774
8775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8777 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8778 IEM_MC_FETCH_EFLAGS(EFlags);
8779 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8780
8781 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8782 IEM_MC_COMMIT_EFLAGS(EFlags);
8783 IEM_MC_ADVANCE_RIP_AND_FINISH();
8784 IEM_MC_END();
8785 }
8786}
8787
8788
8789
8790/**
8791 * @opcode 0xd1
8792 */
8793FNIEMOP_DEF(iemOp_Grp2_Ev_1)
8794{
8795 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8796 PCIEMOPSHIFTSIZES pImpl;
8797 switch (IEM_GET_MODRM_REG_8(bRm))
8798 {
8799 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
8800 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
8801 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
8802 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
8803 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
8804 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
8805 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
8806 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8807 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
8808 }
8809 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8810
8811 if (IEM_IS_MODRM_REG_MODE(bRm))
8812 {
8813 /* register */
8814 switch (pVCpu->iem.s.enmEffOpSize)
8815 {
8816 case IEMMODE_16BIT:
8817 IEM_MC_BEGIN(3, 0, 0, 0);
8818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8819 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8820 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8821 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8822 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8823 IEM_MC_REF_EFLAGS(pEFlags);
8824 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8825 IEM_MC_ADVANCE_RIP_AND_FINISH();
8826 IEM_MC_END();
8827 break;
8828
8829 case IEMMODE_32BIT:
8830 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
8831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8832 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8833 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8834 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8835 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8836 IEM_MC_REF_EFLAGS(pEFlags);
8837 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8838 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
8839 IEM_MC_ADVANCE_RIP_AND_FINISH();
8840 IEM_MC_END();
8841 break;
8842
8843 case IEMMODE_64BIT:
8844 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
8845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8846 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8847 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8848 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8849 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8850 IEM_MC_REF_EFLAGS(pEFlags);
8851 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8852 IEM_MC_ADVANCE_RIP_AND_FINISH();
8853 IEM_MC_END();
8854 break;
8855
8856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8857 }
8858 }
8859 else
8860 {
8861 /* memory */
8862 switch (pVCpu->iem.s.enmEffOpSize)
8863 {
8864 case IEMMODE_16BIT:
8865 IEM_MC_BEGIN(3, 3, 0, 0);
8866 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
8867 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8868 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8869 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8870 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8871
8872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8874 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8875 IEM_MC_FETCH_EFLAGS(EFlags);
8876 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
8877
8878 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8879 IEM_MC_COMMIT_EFLAGS(EFlags);
8880 IEM_MC_ADVANCE_RIP_AND_FINISH();
8881 IEM_MC_END();
8882 break;
8883
8884 case IEMMODE_32BIT:
8885 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
8886 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
8887 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8888 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8889 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8890 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8891
8892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8894 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8895 IEM_MC_FETCH_EFLAGS(EFlags);
8896 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
8897
8898 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8899 IEM_MC_COMMIT_EFLAGS(EFlags);
8900 IEM_MC_ADVANCE_RIP_AND_FINISH();
8901 IEM_MC_END();
8902 break;
8903
8904 case IEMMODE_64BIT:
8905 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
8906 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
8907 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
8908 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8909 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8910 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8911
8912 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8913 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8914 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8915 IEM_MC_FETCH_EFLAGS(EFlags);
8916 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
8917
8918 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8919 IEM_MC_COMMIT_EFLAGS(EFlags);
8920 IEM_MC_ADVANCE_RIP_AND_FINISH();
8921 IEM_MC_END();
8922 break;
8923
8924 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8925 }
8926 }
8927}
8928
8929
8930/**
8931 * @opcode 0xd2
8932 */
8933FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8934{
8935 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8936 PCIEMOPSHIFTSIZES pImpl;
8937 switch (IEM_GET_MODRM_REG_8(bRm))
8938 {
8939 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8940 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8941 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8942 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8943 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8944 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8945 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8946 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8947 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
8948 }
8949 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8950
8951 if (IEM_IS_MODRM_REG_MODE(bRm))
8952 {
8953 /* register */
8954 IEM_MC_BEGIN(3, 0, 0, 0);
8955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8956 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8957 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8958 IEM_MC_ARG(uint32_t *, pEFlags, 2);
8959 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8960 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
8961 IEM_MC_REF_EFLAGS(pEFlags);
8962 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8963 IEM_MC_ADVANCE_RIP_AND_FINISH();
8964 IEM_MC_END();
8965 }
8966 else
8967 {
8968 /* memory */
8969 IEM_MC_BEGIN(3, 3, 0, 0);
8970 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
8971 IEM_MC_ARG(uint8_t, cShiftArg, 1);
8972 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
8973 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8974 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
8975
8976 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8978 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
8979 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8980 IEM_MC_FETCH_EFLAGS(EFlags);
8981 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
8982
8983 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
8984 IEM_MC_COMMIT_EFLAGS(EFlags);
8985 IEM_MC_ADVANCE_RIP_AND_FINISH();
8986 IEM_MC_END();
8987 }
8988}
8989
8990
8991/**
8992 * @opcode 0xd3
8993 */
8994FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
8995{
8996 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8997 PCIEMOPSHIFTSIZES pImpl;
8998 switch (IEM_GET_MODRM_REG_8(bRm))
8999 {
9000 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
9001 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
9002 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
9003 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
9004 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
9005 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
9006 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
9007 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9008 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
9009 }
9010 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
9011
9012 if (IEM_IS_MODRM_REG_MODE(bRm))
9013 {
9014 /* register */
9015 switch (pVCpu->iem.s.enmEffOpSize)
9016 {
9017 case IEMMODE_16BIT:
9018 IEM_MC_BEGIN(3, 0, 0, 0);
9019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9020 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9021 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9022 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9023 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9024 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9025 IEM_MC_REF_EFLAGS(pEFlags);
9026 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9027 IEM_MC_ADVANCE_RIP_AND_FINISH();
9028 IEM_MC_END();
9029 break;
9030
9031 case IEMMODE_32BIT:
9032 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
9033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9034 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9035 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9036 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9037 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9038 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9039 IEM_MC_REF_EFLAGS(pEFlags);
9040 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9041 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
9042 IEM_MC_ADVANCE_RIP_AND_FINISH();
9043 IEM_MC_END();
9044 break;
9045
9046 case IEMMODE_64BIT:
9047 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
9048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9049 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9050 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9051 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9052 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9053 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9054 IEM_MC_REF_EFLAGS(pEFlags);
9055 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9056 IEM_MC_ADVANCE_RIP_AND_FINISH();
9057 IEM_MC_END();
9058 break;
9059
9060 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9061 }
9062 }
9063 else
9064 {
9065 /* memory */
9066 switch (pVCpu->iem.s.enmEffOpSize)
9067 {
9068 case IEMMODE_16BIT:
9069 IEM_MC_BEGIN(3, 3, 0, 0);
9070 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9071 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9072 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9074 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9075
9076 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9077 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9078 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9079 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9080 IEM_MC_FETCH_EFLAGS(EFlags);
9081 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
9082
9083 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9084 IEM_MC_COMMIT_EFLAGS(EFlags);
9085 IEM_MC_ADVANCE_RIP_AND_FINISH();
9086 IEM_MC_END();
9087 break;
9088
9089 case IEMMODE_32BIT:
9090 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
9091 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9092 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9093 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9094 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9095 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9096
9097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9099 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9100 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9101 IEM_MC_FETCH_EFLAGS(EFlags);
9102 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
9103
9104 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9105 IEM_MC_COMMIT_EFLAGS(EFlags);
9106 IEM_MC_ADVANCE_RIP_AND_FINISH();
9107 IEM_MC_END();
9108 break;
9109
9110 case IEMMODE_64BIT:
9111 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
9112 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9113 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9114 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9116 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9117
9118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9120 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9121 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9122 IEM_MC_FETCH_EFLAGS(EFlags);
9123 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
9124
9125 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9126 IEM_MC_COMMIT_EFLAGS(EFlags);
9127 IEM_MC_ADVANCE_RIP_AND_FINISH();
9128 IEM_MC_END();
9129 break;
9130
9131 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9132 }
9133 }
9134}
9135
9136/**
9137 * @opcode 0xd4
9138 */
9139FNIEMOP_DEF(iemOp_aam_Ib)
9140{
9141 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
9142 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9144 IEMOP_HLP_NO_64BIT();
9145 if (!bImm)
9146 IEMOP_RAISE_DIVIDE_ERROR_RET();
9147 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
9148}
9149
9150
9151/**
9152 * @opcode 0xd5
9153 */
9154FNIEMOP_DEF(iemOp_aad_Ib)
9155{
9156 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
9157 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9158 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9159 IEMOP_HLP_NO_64BIT();
9160 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
9161}
9162
9163
9164/**
9165 * @opcode 0xd6
9166 */
9167FNIEMOP_DEF(iemOp_salc)
9168{
9169 IEMOP_MNEMONIC(salc, "salc");
9170 IEMOP_HLP_NO_64BIT();
9171
9172 IEM_MC_BEGIN(0, 0, 0, 0);
9173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9174 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9175 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
9176 } IEM_MC_ELSE() {
9177 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
9178 } IEM_MC_ENDIF();
9179 IEM_MC_ADVANCE_RIP_AND_FINISH();
9180 IEM_MC_END();
9181}
9182
9183
9184/**
9185 * @opcode 0xd7
9186 */
9187FNIEMOP_DEF(iemOp_xlat)
9188{
9189 IEMOP_MNEMONIC(xlat, "xlat");
9190 switch (pVCpu->iem.s.enmEffAddrMode)
9191 {
9192 case IEMMODE_16BIT:
9193 IEM_MC_BEGIN(2, 0, 0, 0);
9194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9195 IEM_MC_LOCAL(uint8_t, u8Tmp);
9196 IEM_MC_LOCAL(uint16_t, u16Addr);
9197 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
9198 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
9199 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
9200 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9201 IEM_MC_ADVANCE_RIP_AND_FINISH();
9202 IEM_MC_END();
9203 break;
9204
9205 case IEMMODE_32BIT:
9206 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
9207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9208 IEM_MC_LOCAL(uint8_t, u8Tmp);
9209 IEM_MC_LOCAL(uint32_t, u32Addr);
9210 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
9211 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
9212 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
9213 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9214 IEM_MC_ADVANCE_RIP_AND_FINISH();
9215 IEM_MC_END();
9216 break;
9217
9218 case IEMMODE_64BIT:
9219 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
9220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9221 IEM_MC_LOCAL(uint8_t, u8Tmp);
9222 IEM_MC_LOCAL(uint64_t, u64Addr);
9223 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
9224 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
9225 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
9226 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9227 IEM_MC_ADVANCE_RIP_AND_FINISH();
9228 IEM_MC_END();
9229 break;
9230
9231 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9232 }
9233}
9234
9235
9236/**
9237 * Common worker for FPU instructions working on ST0 and STn, and storing the
9238 * result in ST0.
9239 *
9240 * @param bRm Mod R/M byte.
9241 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9242 */
9243FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9244{
9245 IEM_MC_BEGIN(3, 1, 0, 0);
9246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9247 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9248 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9249 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9250 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9251
9252 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9253 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9254 IEM_MC_PREPARE_FPU_USAGE();
9255 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9256 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9257 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9258 } IEM_MC_ELSE() {
9259 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9260 } IEM_MC_ENDIF();
9261 IEM_MC_ADVANCE_RIP_AND_FINISH();
9262
9263 IEM_MC_END();
9264}
9265
9266
9267/**
9268 * Common worker for FPU instructions working on ST0 and STn, and only affecting
9269 * flags.
9270 *
9271 * @param bRm Mod R/M byte.
9272 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9273 */
9274FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9275{
9276 IEM_MC_BEGIN(3, 1, 0, 0);
9277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9278 IEM_MC_LOCAL(uint16_t, u16Fsw);
9279 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9280 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9281 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9282
9283 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9284 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9285 IEM_MC_PREPARE_FPU_USAGE();
9286 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9287 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9288 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9289 } IEM_MC_ELSE() {
9290 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9291 } IEM_MC_ENDIF();
9292 IEM_MC_ADVANCE_RIP_AND_FINISH();
9293
9294 IEM_MC_END();
9295}
9296
9297
9298/**
9299 * Common worker for FPU instructions working on ST0 and STn, only affecting
9300 * flags, and popping when done.
9301 *
9302 * @param bRm Mod R/M byte.
9303 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9304 */
9305FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9306{
9307 IEM_MC_BEGIN(3, 1, 0, 0);
9308 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9309 IEM_MC_LOCAL(uint16_t, u16Fsw);
9310 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9311 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9312 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9313
9314 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9315 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9316 IEM_MC_PREPARE_FPU_USAGE();
9317 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9318 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9319 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9320 } IEM_MC_ELSE() {
9321 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9322 } IEM_MC_ENDIF();
9323 IEM_MC_ADVANCE_RIP_AND_FINISH();
9324
9325 IEM_MC_END();
9326}
9327
9328
/** Opcode 0xd8 11/0 - fadd st0,stN: ST0 = ST0 + STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
9335
9336
/** Opcode 0xd8 11/1 - fmul st0,stN: ST0 = ST0 * STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
9343
9344
/** Opcode 0xd8 11/2 - fcom st0,stN: compare ST0 with STn, FSW only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
9351
9352
/** Opcode 0xd8 11/3 - fcomp st0,stN: compare ST0 with STn, FSW only, then pop.
 * Shares the fcom worker; only the popping wrapper differs. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
9359
9360
/** Opcode 0xd8 11/4 - fsub st0,stN: ST0 = ST0 - STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
9367
9368
/** Opcode 0xd8 11/5 - fsubr st0,stN: reversed subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
9375
9376
/** Opcode 0xd8 11/6 - fdiv st0,stN: ST0 = ST0 / STn, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
9383
9384
/** Opcode 0xd8 11/7 - fdivr st0,stN: reversed divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9391
9392
9393/**
9394 * Common worker for FPU instructions working on ST0 and an m32r, and storing
9395 * the result in ST0.
9396 *
9397 * @param bRm Mod R/M byte.
9398 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9399 */
9400FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
9401{
9402 IEM_MC_BEGIN(3, 3, 0, 0);
9403 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9404 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9405 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
9406 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9407 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9408 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
9409
9410 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9411 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9412
9413 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9414 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9415 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9416
9417 IEM_MC_PREPARE_FPU_USAGE();
9418 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
9419 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
9420 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9421 } IEM_MC_ELSE() {
9422 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9423 } IEM_MC_ENDIF();
9424 IEM_MC_ADVANCE_RIP_AND_FINISH();
9425
9426 IEM_MC_END();
9427}
9428
9429
/** Opcode 0xd8 !11/0 - fadd st0,m32r: ST0 = ST0 + m32 real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
9436
9437
/** Opcode 0xd8 !11/1 - fmul st0,m32r: ST0 = ST0 * m32 real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
9444
9445
/** Opcode 0xd8 !11/2 - fcom st0,m32r: compare ST0 with m32 real, FSW only.
 * Open-coded rather than using a helper since the FSW update records the
 * memory operand (WITH_MEM_OP variants) for FDP/FDS bookkeeping. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9477
9478
/** Opcode 0xd8 !11/3 - fcomp st0,m32r: compare ST0 with m32 real, FSW only,
 * then pop.  Same as fcom m32r but using the *_THEN_POP FSW/underflow
 * variants. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9510
9511
/** Opcode 0xd8 !11/4 - fsub st0,m32r: ST0 = ST0 - m32 real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
9518
9519
/** Opcode 0xd8 !11/5 - fsubr st0,m32r: reversed subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
9526
9527
/** Opcode 0xd8 !11/6 - fdiv st0,m32r: ST0 = ST0 / m32 real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
9534
9535
/** Opcode 0xd8 !11/7 - fdivr st0,m32r: reversed divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9542
9543
9544/**
9545 * @opcode 0xd8
9546 */
9547FNIEMOP_DEF(iemOp_EscF0)
9548{
9549 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9550 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9551
9552 if (IEM_IS_MODRM_REG_MODE(bRm))
9553 {
9554 switch (IEM_GET_MODRM_REG_8(bRm))
9555 {
9556 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
9557 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
9558 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
9559 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9560 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
9561 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9562 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
9563 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9564 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9565 }
9566 }
9567 else
9568 {
9569 switch (IEM_GET_MODRM_REG_8(bRm))
9570 {
9571 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
9572 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
9573 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
9574 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9575 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
9576 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9577 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
9578 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9580 }
9581 }
9582}
9583
9584
/** Opcode 0xd9 /0 mem32real
 * FLD m32real: converts a 32-bit real memory operand to 80-bit real and
 * pushes it onto the FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                 GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,            FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,              r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,   pFpuRes,       FpuRes,     0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,    pr32Val,       r32Val,     1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST7 must be free, as it becomes the new stack top after the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack overflow: raise/record #IS per FCW masking. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9615
9616
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real: stores ST0 to a 32-bit real memory operand (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only before looking at the stack. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,    1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: if #IS is masked, store the indefinite QNaN;
           otherwise roll back the memory mapping and raise the exception. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9654
9655
/** Opcode 0xd9 !11/3
 * FSTP m32real: stores ST0 to a 32-bit real memory operand, then pops the
 * FPU stack. Identical to iemOp_fst_m32r except for the popping variants of
 * the FSW-update/underflow macros. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U,             pr32Dst,    1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow: masked #IS writes the indefinite QNaN, unmasked rolls
           the memory access back before reporting the exception. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9693
9694
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte: loads the FPU environment (control/status/tag words,
 * instruction and data pointers) from memory; deferred to a C implementation
 * since the layout depends on the effective operand size. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,       1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9712
9713
9714/** Opcode 0xd9 !11/5 */
9715FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9716{
9717 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9718 IEM_MC_BEGIN(1, 1, 0, 0);
9719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9721
9722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9723 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9724 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9725
9726 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9727 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9728
9729 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
9730 IEM_MC_END();
9731}
9732
9733
/** Opcode 0xd9 !11/6
 * FNSTENV m14/m28byte: stores the FPU environment to memory without first
 * checking for pending FPU exceptions (the "no-wait" form).
 * NOTE(review): the mnemonic-stats identifier says "fstenv" — presumably
 * intentional for stats grouping; confirm before renaming. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,       1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9751
9752
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte: stores the current FPU control word to memory (no-wait
 * form; does not check for pending exceptions). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9769
9770
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: FPU no-operation; still checks CR0.EM/TS and pending FPU exceptions
 * and updates FOP/FPUIP like other FPU instructions. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9786
9787
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): pushes a copy of ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        /* Source register valid: push a copy of it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Source register empty: stack underflow on push. */
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9812
9813
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchanges ST0 with ST(i); the underflow case is handled by a
 * C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF?  Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t,          uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Both registers valid: swap them (C1 is set in the new ST0 FSW). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* One or both empty: underflow handling is non-trivial, use CIMPL. */
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9842
9843
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copies ST0 to ST(i) and pops the stack.  The iDstReg == 0 case
 * degenerates to a pure pop and is special-cased below. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop, no data movement needed. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0 into ST(iDstReg), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9890
9891
9892/**
9893 * Common worker for FPU instructions working on ST0 and replaces it with the
9894 * result, i.e. unary operators.
9895 *
9896 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9897 */
9898FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
9899{
9900 IEM_MC_BEGIN(2, 1, 0, 0);
9901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9902 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9903 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9904 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9905
9906 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9907 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9908 IEM_MC_PREPARE_FPU_USAGE();
9909 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9910 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
9911 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9912 } IEM_MC_ELSE() {
9913 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9914 } IEM_MC_ENDIF();
9915 IEM_MC_ADVANCE_RIP_AND_FINISH();
9916
9917 IEM_MC_END();
9918}
9919
9920
/** Opcode 0xd9 0xe0.
 * FCHS: negates the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9927
9928
/** Opcode 0xd9 0xe1.
 * FABS: clears the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9935
9936
/** Opcode 0xd9 0xe4.
 * FTST: compares ST0 against +0.0, setting C0/C2/C3 in the FSW only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,              1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX = no destination register to mark empty. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9960
9961
/** Opcode 0xd9 0xe5.
 * FXAM: classifies ST0 into C0/C2/C3/C1.  Note that unlike most FPU
 * instructions it works on an empty ST0 too (reports the empty class), so
 * the register is referenced unconditionally. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,              1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9982
9983
9984/**
9985 * Common worker for FPU instructions pushing a constant onto the FPU stack.
9986 *
9987 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9988 */
9989FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
9990{
9991 IEM_MC_BEGIN(1, 1, 0, 0);
9992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9993 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9994 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9995
9996 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9997 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9998 IEM_MC_PREPARE_FPU_USAGE();
9999 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10000 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10001 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10002 } IEM_MC_ELSE() {
10003 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
10004 } IEM_MC_ENDIF();
10005 IEM_MC_ADVANCE_RIP_AND_FINISH();
10006
10007 IEM_MC_END();
10008}
10009
10010
/** Opcode 0xd9 0xe8.
 * FLD1: pushes +1.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
10017
10018
/** Opcode 0xd9 0xe9.
 * FLDL2T: pushes log2(10) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
10025
10026
/** Opcode 0xd9 0xea.
 * FLDL2E: pushes log2(e) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
10033
/** Opcode 0xd9 0xeb.
 * FLDPI: pushes pi onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
10040
10041
/** Opcode 0xd9 0xec.
 * FLDLG2: pushes log10(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
10048
/** Opcode 0xd9 0xed.
 * FLDLN2: pushes ln(2) onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
10055
10056
/** Opcode 0xd9 0xee.
 * FLDZ: pushes +0.0 onto the FPU stack. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10063
10064
/** Opcode 0xd9 0xf0.
 * F2XM1: ST0 = 2^ST0 - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs).  In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10078
10079
10080/**
10081 * Common worker for FPU instructions working on STn and ST0, storing the result
10082 * in STn, and popping the stack unless IE, DE or ZE was raised.
10083 *
10084 * @param bRm Mod R/M byte.
10085 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10086 */
10087FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10088{
10089 IEM_MC_BEGIN(3, 1, 0, 0);
10090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10091 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10092 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10093 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10094 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10095
10096 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10097 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10098
10099 IEM_MC_PREPARE_FPU_USAGE();
10100 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10101 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10102 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10103 } IEM_MC_ELSE() {
10104 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10105 } IEM_MC_ENDIF();
10106 IEM_MC_ADVANCE_RIP_AND_FINISH();
10107
10108 IEM_MC_END();
10109}
10110
10111
/** Opcode 0xd9 0xf1.
 * FYL2X: ST1 = ST1 * log2(ST0), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10118
10119
10120/**
10121 * Common worker for FPU instructions working on ST0 and having two outputs, one
10122 * replacing ST0 and one pushed onto the stack.
10123 *
10124 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10125 */
10126FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
10127{
10128 IEM_MC_BEGIN(2, 1, 0, 0);
10129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10130 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
10131 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
10132 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10133
10134 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10135 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10136 IEM_MC_PREPARE_FPU_USAGE();
10137 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10138 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
10139 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
10140 } IEM_MC_ELSE() {
10141 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
10142 } IEM_MC_ENDIF();
10143 IEM_MC_ADVANCE_RIP_AND_FINISH();
10144
10145 IEM_MC_END();
10146}
10147
10148
/** Opcode 0xd9 0xf2.
 * FPTAN: replaces ST0 with its partial tangent and pushes 1.0. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10155
10156
/** Opcode 0xd9 0xf3.
 * FPATAN: ST1 = arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10163
10164
/** Opcode 0xd9 0xf4.
 * FXTRACT: replaces ST0 with its exponent and pushes the significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10171
10172
/** Opcode 0xd9 0xf5.
 * FPREM1: IEEE partial remainder of ST0 by ST1, result in ST0 (no pop). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10179
10180
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrements the FPU stack top pointer (TOP in FSW); no register
 * contents change. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10201
10202
/** Opcode 0xd9 0xf7.
 * FINCSTP: increments the FPU stack top pointer (TOP in FSW); no register
 * contents change. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10223
10224
/** Opcode 0xd9 0xf8.
 * FPREM: partial remainder (truncating) of ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
10231
10232
/** Opcode 0xd9 0xf9.
 * FYL2XP1: ST1 = ST1 * log2(ST0 + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
10239
10240
/** Opcode 0xd9 0xfa.
 * FSQRT: ST0 = sqrt(ST0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
10247
10248
/** Opcode 0xd9 0xfb.
 * FSINCOS: replaces ST0 with sin(ST0) and pushes cos(ST0). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
10255
10256
/** Opcode 0xd9 0xfc.
 * FRNDINT: rounds ST0 to an integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
10263
10264
/** Opcode 0xd9 0xfd.
 * FSCALE: ST0 = ST0 * 2^trunc(ST1). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
10271
10272
/** Opcode 0xd9 0xfe.
 * FSIN: ST0 = sin(ST0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
10279
10280
/** Opcode 0xd9 0xff.
 * FCOS: ST0 = cos(ST0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10287
10288
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form 0xd9 opcodes 0xe0 thru 0xff, indexed
 * by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
10325
10326
10327/**
10328 * @opcode 0xd9
10329 */
10330FNIEMOP_DEF(iemOp_EscF1)
10331{
10332 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10333 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
10334
10335 if (IEM_IS_MODRM_REG_MODE(bRm))
10336 {
10337 switch (IEM_GET_MODRM_REG_8(bRm))
10338 {
10339 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
10340 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
10341 case 2:
10342 if (bRm == 0xd0)
10343 return FNIEMOP_CALL(iemOp_fnop);
10344 IEMOP_RAISE_INVALID_OPCODE_RET();
10345 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
10346 case 4:
10347 case 5:
10348 case 6:
10349 case 7:
10350 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
10351 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
10352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10353 }
10354 }
10355 else
10356 {
10357 switch (IEM_GET_MODRM_REG_8(bRm))
10358 {
10359 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
10360 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
10361 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
10362 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
10363 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
10364 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
10365 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
10366 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
10367 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10368 }
10369 }
10370}
10371
10372
/** Opcode 0xda 11/0.
 * FCMOVB ST0,ST(i): copies ST(i) to ST0 if EFLAGS.CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST0 and ST(i) must be valid; only ST(i) is referenced. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10397
10398
/** Opcode 0xda 11/1.
 * FCMOVE ST0,ST(i): copies ST(i) to ST0 if EFLAGS.ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10423
10424
/** Opcode 0xda 11/2.
 * FCMOVBE ST0,ST(i): copies ST(i) to ST0 if CF or ZF is set (below/equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10449
10450
/** Opcode 0xda 11/3.
 * FCMOVU ST0,ST(i): copies ST(i) to ST0 if EFLAGS.PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10475
10476
10477/**
10478 * Common worker for FPU instructions working on ST0 and ST1, only affecting
10479 * flags, and popping twice when done.
10480 *
10481 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10482 */
10483FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10484{
10485 IEM_MC_BEGIN(3, 1, 0, 0);
10486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10487 IEM_MC_LOCAL(uint16_t, u16Fsw);
10488 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10489 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10490 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10491
10492 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10493 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10494
10495 IEM_MC_PREPARE_FPU_USAGE();
10496 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
10497 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10498 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10499 } IEM_MC_ELSE() {
10500 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
10501 } IEM_MC_ENDIF();
10502 IEM_MC_ADVANCE_RIP_AND_FINISH();
10503
10504 IEM_MC_END();
10505}
10506
10507
/** Opcode 0xda 0xe9.
 * FUCOMPP: unordered compare of ST0 with ST1, then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10514
10515
10516/**
10517 * Common worker for FPU instructions working on ST0 and an m32i, and storing
10518 * the result in ST0.
10519 *
10520 * @param bRm Mod R/M byte.
10521 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10522 */
10523FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
10524{
10525 IEM_MC_BEGIN(3, 3, 0, 0);
10526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10527 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10528 IEM_MC_LOCAL(int32_t, i32Val2);
10529 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10530 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10531 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
10532
10533 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10535
10536 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10537 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10538 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10539
10540 IEM_MC_PREPARE_FPU_USAGE();
10541 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10542 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
10543 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10544 } IEM_MC_ELSE() {
10545 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10546 } IEM_MC_ENDIF();
10547 IEM_MC_ADVANCE_RIP_AND_FINISH();
10548
10549 IEM_MC_END();
10550}
10551
10552
/** Opcode 0xda !11/0.
 * FIADD m32i: ST(0) := ST(0) + (signed 32-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10559
10560
/** Opcode 0xda !11/1.
 * FIMUL m32i: ST(0) := ST(0) * (signed 32-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10567
10568
/** Opcode 0xda !11/2.
 * FICOM m32i: compare ST(0) with a signed 32-bit integer memory operand,
 * updating only the FPU status word (no result stored, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory-operand variant also records FDP/FDS via the _MEM_OP update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: underflow is not attributed to a specific destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10600
10601
/** Opcode 0xda !11/3.
 * FICOMP m32i: like FICOM m32i (uses the same assembly worker), but pops
 * the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow path still pops, matching the instruction's pop semantics. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10633
10634
/** Opcode 0xda !11/4.
 * FISUB m32i: ST(0) := ST(0) - (signed 32-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10641
10642
/** Opcode 0xda !11/5.
 * FISUBR m32i: reversed subtract, ST(0) := m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10649
10650
/** Opcode 0xda !11/6.
 * FIDIV m32i: ST(0) := ST(0) / (signed 32-bit integer memory operand). */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10657
10658
/** Opcode 0xda !11/7.
 * FIDIVR m32i: reversed divide, ST(0) := m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10665
10666
10667/**
10668 * @opcode 0xda
10669 */
10670FNIEMOP_DEF(iemOp_EscF2)
10671{
10672 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10673 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
10674 if (IEM_IS_MODRM_REG_MODE(bRm))
10675 {
10676 switch (IEM_GET_MODRM_REG_8(bRm))
10677 {
10678 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
10679 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
10680 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
10681 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
10682 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10683 case 5:
10684 if (bRm == 0xe9)
10685 return FNIEMOP_CALL(iemOp_fucompp);
10686 IEMOP_RAISE_INVALID_OPCODE_RET();
10687 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10688 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10690 }
10691 }
10692 else
10693 {
10694 switch (IEM_GET_MODRM_REG_8(bRm))
10695 {
10696 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
10697 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
10698 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
10699 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
10700 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
10701 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
10702 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
10703 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
10704 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10705 }
10706 }
10707}
10708
10709
/** Opcode 0xdb !11/0.
 * FILD m32i: convert a signed 32-bit integer memory operand to R80 and push
 * it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is where the push lands; it must be free,
       otherwise the push overflows the stack. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10740
10741
/** Opcode 0xdb !11/1.
 * FISTTP m32i: store ST(0) to memory as a signed 32-bit integer using
 * truncation (round toward zero), then pop the register stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only before doing the conversion. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW the helper returned. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked store the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10779
10780
/** Opcode 0xdb !11/2.
 * FIST m32i: store ST(0) to memory as a signed 32-bit integer (rounded per
 * FCW), without popping. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        /* No pop here - that's the fistp variant. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked store the integer indefinite, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10818
10819
10820/** Opcode 0xdb !11/3. */
10821FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
10822{
10823 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
10824 IEM_MC_BEGIN(3, 2, 0, 0);
10825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10827
10828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10829 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10830 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10831 IEM_MC_PREPARE_FPU_USAGE();
10832
10833 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
10834 IEM_MC_ARG(int32_t *, pi32Dst, 1);
10835 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10836
10837 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10838 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10839 IEM_MC_LOCAL(uint16_t, u16Fsw);
10840 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
10841 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
10842 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
10843 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10844 } IEM_MC_ELSE() {
10845 IEM_MC_IF_FCW_IM() {
10846 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
10847 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
10848 } IEM_MC_ELSE() {
10849 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
10850 } IEM_MC_ENDIF();
10851 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
10852 } IEM_MC_ENDIF();
10853 IEM_MC_ADVANCE_RIP_AND_FINISH();
10854
10855 IEM_MC_END();
10856}
10857
10858
/** Opcode 0xdb !11/5.
 * FLD m80r: load an 80-bit real memory operand and push it onto the FPU
 * register stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target (reg 7 relative to TOP) must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10889
10890
/** Opcode 0xdb !11/7.
 * FSTP m80r: store ST(0) to an 80-bit real memory operand, then pop the
 * register stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: with IM masked store negative QNaN, else roll back the mapping. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10928
10929
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i): copy ST(i) to ST(0) if CF is clear (not below). */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST(0) must be occupied, else stack underflow. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10954
10955
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i): copy ST(i) to ST(0) if ZF is clear (not equal). */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10980
10981
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i): copy ST(i) to ST(0) if both CF and ZF are clear
 * (not below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11006
11007
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i): copy ST(i) to ST(0) if PF is clear (not unordered).
 * (Function named fcmovnnu here; the tested condition is !PF.) */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11032
11033
/** Opcode 0xdb 0xe0.
 * FNENI: 8087 interrupt-enable instruction; a no-op on later FPUs.
 * Emulated as: raise \#NM if applicable, otherwise just advance RIP. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11044
11045
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087 interrupt-disable instruction; a no-op on later FPUs.
 * Emulated as: raise \#NM if applicable, otherwise just advance RIP. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11056
11057
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception bits in FSW without checking for pending
 * exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* Modifies FSW, so the FPU state must be loaded for changing. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11070
11071
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU without checking for pending exceptions
 * (fCheckXcpts=false); the heavy lifting is deferred to iemCImpl_finit. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11079
11080
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287 "set protected mode"; ignored (no-op) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11091
11092
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL "reset protected mode"; raises \#UD here since newer CPUs
 * treat the encoding as invalid (the ignore-variant is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11108
11109
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i): unordered compare setting EFLAGS; deferred to the
 * shared fcomi/fucomi C implementation with fUCmp=true and no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    /* Third argument packs the pop flag (clear here) with the FPU opcode word. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11118
11119
/** Opcode 0xdb 11/6.
 * FCOMI ST(0),ST(i): ordered compare setting EFLAGS; deferred to the shared
 * fcomi/fucomi C implementation with fUCmp=false and no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    /* Third argument packs the pop flag (clear here) with the FPU opcode word. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11128
11129
11130/**
11131 * @opcode 0xdb
11132 */
11133FNIEMOP_DEF(iemOp_EscF3)
11134{
11135 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11136 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
11137 if (IEM_IS_MODRM_REG_MODE(bRm))
11138 {
11139 switch (IEM_GET_MODRM_REG_8(bRm))
11140 {
11141 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
11142 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
11143 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
11144 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
11145 case 4:
11146 switch (bRm)
11147 {
11148 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
11149 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
11150 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
11151 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
11152 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
11153 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
11154 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
11155 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
11156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11157 }
11158 break;
11159 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
11160 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
11161 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11162 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11163 }
11164 }
11165 else
11166 {
11167 switch (IEM_GET_MODRM_REG_8(bRm))
11168 {
11169 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
11170 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
11171 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
11172 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
11173 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11174 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
11175 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11176 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
11177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11178 }
11179 }
11180}
11181
11182
11183/**
11184 * Common worker for FPU instructions working on STn and ST0, and storing the
11185 * result in STn unless IE, DE or ZE was raised.
11186 *
11187 * @param bRm Mod R/M byte.
11188 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11189 */
11190FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11191{
11192 IEM_MC_BEGIN(3, 1, 0, 0);
11193 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11194 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11195 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11196 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11197 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11198
11199 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11200 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11201
11202 IEM_MC_PREPARE_FPU_USAGE();
11203 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11204 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11205 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11206 } IEM_MC_ELSE() {
11207 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11208 } IEM_MC_ENDIF();
11209 IEM_MC_ADVANCE_RIP_AND_FINISH();
11210
11211 IEM_MC_END();
11212}
11213
11214
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0): ST(i) := ST(i) + ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11221
11222
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0): ST(i) := ST(i) * ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11229
11230
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0): reversed-operand subtract stored in ST(i).
 * (Note the 0xdc group swaps the sub/subr encodings relative to 0xd8.) */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11237
11238
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0): subtract stored in ST(i).
 * (Note the 0xdc group swaps the sub/subr encodings relative to 0xd8.) */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11245
11246
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0): reversed-operand divide stored in ST(i).
 * (Note the 0xdc group swaps the div/divr encodings relative to 0xd8.) */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11253
11254
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0): divide stored in ST(i).
 * (Note the 0xdc group swaps the div/divr encodings relative to 0xd8.) */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11261
11262
11263/**
11264 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
11265 * memory operand, and storing the result in ST0.
11266 *
11267 * @param bRm Mod R/M byte.
11268 * @param pfnImpl Pointer to the instruction implementation (assembly).
11269 */
11270FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
11271{
11272 IEM_MC_BEGIN(3, 3, 0, 0);
11273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11274 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11275 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
11276 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11277 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
11278 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
11279
11280 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11281 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11282 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11283 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11284
11285 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11286 IEM_MC_PREPARE_FPU_USAGE();
11287 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
11288 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
11289 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11290 } IEM_MC_ELSE() {
11291 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11292 } IEM_MC_ENDIF();
11293 IEM_MC_ADVANCE_RIP_AND_FINISH();
11294
11295 IEM_MC_END();
11296}
11297
11298
/** Opcode 0xdc !11/0.
 * FADD m64r: ST(0) := ST(0) + (64-bit real memory operand). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11305
11306
/** Opcode 0xdc !11/1.
 * FMUL m64r: ST(0) := ST(0) * (64-bit real memory operand). */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11313
11314
/** Opcode 0xdc !11/2.
 * FCOM m64r: compare ST(0) with a 64-bit real memory operand, updating only
 * the FPU status word (no result stored, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: underflow is not attributed to a specific destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11346
11347
/** Opcode 0xdc !11/3.
 * FCOMP m64r: like FCOM m64r (same assembly worker), but pops the register
 * stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Underflow path still pops, matching the instruction's pop semantics. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11379
11380
/** Opcode 0xdc !11/4.
 * FSUB m64r: ST(0) := ST(0) - (64-bit real memory operand). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
11387
11388
/** Opcode 0xdc !11/5.
 * FSUBR m64r: reversed subtract, ST(0) := m64r - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
11395
11396
/** Opcode 0xdc !11/6.
 * FDIV m64r: ST(0) := ST(0) / (64-bit real memory operand). */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
11403
11404
/** Opcode 0xdc !11/7.
 * FDIVR m64r: reversed divide, ST(0) := m64r / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11411
11412
11413/**
11414 * @opcode 0xdc
11415 */
11416FNIEMOP_DEF(iemOp_EscF4)
11417{
11418 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11419 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
11420 if (IEM_IS_MODRM_REG_MODE(bRm))
11421 {
11422 switch (IEM_GET_MODRM_REG_8(bRm))
11423 {
11424 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
11425 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
11426 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
11427 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
11428 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
11429 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
11430 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
11431 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
11432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11433 }
11434 }
11435 else
11436 {
11437 switch (IEM_GET_MODRM_REG_8(bRm))
11438 {
11439 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
11440 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
11441 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
11442 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
11443 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
11444 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
11445 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
11446 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
11447 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11448 }
11449 }
11450}
11451
11452
/** Opcode 0xdd !11/0.
 * FLD m64fp: convert the m64 real operand to R80 and push it onto the FPU
 * stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,                r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes,     0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,      pr64Val,    r64Val,     1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs the register that becomes the new top (ST7) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11483
11484
/** Opcode 0xdd !11/1.
 * FISTTP m64i: store ST(0) to memory as a 64-bit integer using truncation
 * (chop) rounding regardless of FCW.RC, then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int64_t *,               pi64Dst,    1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the integer indefinite
           value; otherwise roll the mapping back and store nothing. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11522
11523
/** Opcode 0xdd !11/2.
 * FST m64fp: store ST(0) to memory as a 64-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,    1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store negative QNaN if #IA is masked, else
           roll back the memory mapping without storing anything. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11561
11562
11563
11564
/** Opcode 0xdd !11/3.
 * FSTP m64fp: store ST(0) to memory as a 64-bit real, then pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,    1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store negative QNaN if #IA is masked, else
           roll back the memory mapping without storing anything. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11602
11603
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte: restore the complete FPU state from memory.
 * Deferred to a C implementation worker. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                            2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,          0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,               1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11621
11622
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte: save the complete FPU state to memory and then
 * reinitialize the FPU.  Deferred to a C implementation worker. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                            2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,          0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,               1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11640
/** Opcode 0xdd !11/7.
 * FNSTSW m16: store the FPU status word to memory (no exception check). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11664
11665
/** Opcode 0xdd 11/0.
 * FFREE ST(i): tag the given stack register as empty without changing the
 * stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11685
11686
/** Opcode 0xdd 11/2.
 * FST ST(i): copy ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11709
11710
/** Opcode 0xdd 11/4.
 * FUCOM ST(i): unordered compare of ST(0) with ST(i), no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11717
11718
/** Opcode 0xdd 11/5.
 * FUCOMP ST(i): unordered compare of ST(0) with ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11725
11726
11727/**
11728 * @opcode 0xdd
11729 */
11730FNIEMOP_DEF(iemOp_EscF5)
11731{
11732 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11733 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11734 if (IEM_IS_MODRM_REG_MODE(bRm))
11735 {
11736 switch (IEM_GET_MODRM_REG_8(bRm))
11737 {
11738 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11739 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11740 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11741 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11742 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11743 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11744 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11745 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11746 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11747 }
11748 }
11749 else
11750 {
11751 switch (IEM_GET_MODRM_REG_8(bRm))
11752 {
11753 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11754 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11755 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11756 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11757 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11758 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11759 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11760 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11761 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11762 }
11763 }
11764}
11765
11766
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): ST(i) = ST(i) + ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11773
11774
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): ST(i) = ST(i) * ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11781
11782
/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11789
11790
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): ST(i) = ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11797
11798
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): ST(i) = ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11805
11806
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11813
11814
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11821
11822
11823/**
11824 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11825 * the result in ST0.
11826 *
11827 * @param bRm Mod R/M byte.
11828 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11829 */
11830FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11831{
11832 IEM_MC_BEGIN(3, 3, 0, 0);
11833 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11834 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11835 IEM_MC_LOCAL(int16_t, i16Val2);
11836 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11837 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11838 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11839
11840 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11842
11843 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11844 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11845 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11846
11847 IEM_MC_PREPARE_FPU_USAGE();
11848 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11849 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11850 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11851 } IEM_MC_ELSE() {
11852 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11853 } IEM_MC_ENDIF();
11854 IEM_MC_ADVANCE_RIP_AND_FINISH();
11855
11856 IEM_MC_END();
11857}
11858
11859
/** Opcode 0xde !11/0.
 * FIADD m16int: ST(0) = ST(0) + m16int. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11866
11867
/** Opcode 0xde !11/1.
 * FIMUL m16int: ST(0) = ST(0) * m16int. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11874
11875
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST(0) with m16int, setting FSW condition codes
 * (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11907
11908
/** Opcode 0xde !11/3.
 * FICOMP m16int: compare ST(0) with m16int, setting FSW condition codes,
 * then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int16_t,               i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11940
11941
/** Opcode 0xde !11/4.
 * FISUB m16int: ST(0) = ST(0) - m16int. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
11948
11949
/** Opcode 0xde !11/5.
 * FISUBR m16int: ST(0) = m16int - ST(0) (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
11956
11957
/** Opcode 0xde !11/6.
 * FIDIV m16int: ST(0) = ST(0) / m16int. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
11964
11965
/** Opcode 0xde !11/7.
 * FIDIVR m16int: ST(0) = m16int / ST(0) (reversed operand order). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
11972
11973
11974/**
11975 * @opcode 0xde
11976 */
11977FNIEMOP_DEF(iemOp_EscF6)
11978{
11979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11980 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
11981 if (IEM_IS_MODRM_REG_MODE(bRm))
11982 {
11983 switch (IEM_GET_MODRM_REG_8(bRm))
11984 {
11985 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
11986 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
11987 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
11988 case 3: if (bRm == 0xd9)
11989 return FNIEMOP_CALL(iemOp_fcompp);
11990 IEMOP_RAISE_INVALID_OPCODE_RET();
11991 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
11992 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
11993 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
11994 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
11995 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11996 }
11997 }
11998 else
11999 {
12000 switch (IEM_GET_MODRM_REG_8(bRm))
12001 {
12002 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
12003 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
12004 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
12005 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
12006 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
12007 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
12008 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
12009 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
12010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12011 }
12012 }
12013}
12014
12015
/** Opcode 0xdf 11/0.
 * FFREEP ST(i) -- undocumented instruction, assumed to work like
 * FFREE + FINCSTP (tag the register empty, then increment the stack top). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12035
12036
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX (no exception check). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12051
12052
12053/** Opcode 0xdf 11/5. */
12054FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12055{
12056 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12057 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12058 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12059 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12060}
12061
12062
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare setting EFLAGS (fUCmp=false), then
 * pop.  Deferred to the C worker shared with FCOMI/FUCOMI(P). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12071
12072
/** Opcode 0xdf !11/0.
 * FILD m16int: convert the 16-bit signed integer operand to R80 and push
 * it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int16_t,                   i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *,   pi16Val,    i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs the register that becomes the new top (ST7) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12103
12104
/** Opcode 0xdf !11/1.
 * FISTTP m16int: store ST(0) to memory as a 16-bit integer using
 * truncation (chop) rounding regardless of FCW.RC, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int16_t *,               pi16Dst,    1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the integer indefinite
           value; otherwise roll the mapping back and store nothing. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12142
12143
/** Opcode 0xdf !11/2.
 * FIST m16int: store ST(0) to memory as a 16-bit integer, rounding per
 * FCW.RC (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int16_t *,               pi16Dst,    1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the integer indefinite
           value; otherwise roll the mapping back and store nothing. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12181
12182
/** Opcode 0xdf !11/3.
 * FISTP m16int: store ST(0) to memory as a 16-bit integer, rounding per
 * FCW.RC, then pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int16_t *,               pi16Dst,    1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the integer indefinite
           value; otherwise roll the mapping back and store nothing. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12220
12221
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: convert the 80-bit packed BCD operand to R80 and push it
 * onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(RTPBCD80U,                 d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U,       pd80Val,    d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs the register that becomes the new top (ST7) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12252
12253
/** Opcode 0xdf !11/5.
 * FILD m64int: convert the 64-bit signed integer operand to R80 and push
 * it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,                   GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,              FpuRes);
    IEM_MC_LOCAL(int64_t,                   i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT,     pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *,   pi64Val,    i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs the register that becomes the new top (ST7) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12284
12285
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: store ST(0) to memory as 80-bit packed BCD, then pop. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U,              pd80Dst,    1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,  2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with #IA masked, store the BCD indefinite
           value; otherwise roll the mapping back and store nothing. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12323
12324
/** Opcode 0xdf !11/7. FISTP m64i
 *
 * Stores ST(0) to memory as a 64-bit signed integer and pops the FPU
 * stack.  On stack underflow with FCW.IM set, the integer indefinite
 * value (INT64_MIN) is stored; unmasked, the mapping is rolled back.
 * @param   bRm     The ModR/M byte (memory form only).
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults precede FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        /* Commit only if the helper's FSW permits the store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12362
12363
12364/**
12365 * @opcode 0xdf
12366 */
12367FNIEMOP_DEF(iemOp_EscF7)
12368{
12369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12370 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
12371 if (IEM_IS_MODRM_REG_MODE(bRm))
12372 {
12373 switch (IEM_GET_MODRM_REG_8(bRm))
12374 {
12375 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
12376 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
12377 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12378 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
12379 case 4: if (bRm == 0xe0)
12380 return FNIEMOP_CALL(iemOp_fnstsw_ax);
12381 IEMOP_RAISE_INVALID_OPCODE_RET();
12382 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
12383 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
12384 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12386 }
12387 }
12388 else
12389 {
12390 switch (IEM_GET_MODRM_REG_8(bRm))
12391 {
12392 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
12393 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
12394 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
12395 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
12396 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
12397 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
12398 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
12399 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
12400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12401 }
12402 }
12403}
12404
12405
12406/**
12407 * @opcode 0xe0
12408 */
12409FNIEMOP_DEF(iemOp_loopne_Jb)
12410{
12411 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
12412 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12413 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12414
12415 switch (pVCpu->iem.s.enmEffAddrMode)
12416 {
12417 case IEMMODE_16BIT:
12418 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12419 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12420 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12421 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12422 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12423 } IEM_MC_ELSE() {
12424 IEM_MC_ADVANCE_RIP_AND_FINISH();
12425 } IEM_MC_ENDIF();
12426 IEM_MC_END();
12427 break;
12428
12429 case IEMMODE_32BIT:
12430 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12432 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12433 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12434 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12435 } IEM_MC_ELSE() {
12436 IEM_MC_ADVANCE_RIP_AND_FINISH();
12437 } IEM_MC_ENDIF();
12438 IEM_MC_END();
12439 break;
12440
12441 case IEMMODE_64BIT:
12442 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12443 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12444 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12445 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
12446 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12447 } IEM_MC_ELSE() {
12448 IEM_MC_ADVANCE_RIP_AND_FINISH();
12449 } IEM_MC_ENDIF();
12450 IEM_MC_END();
12451 break;
12452
12453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12454 }
12455}
12456
12457
12458/**
12459 * @opcode 0xe1
12460 */
12461FNIEMOP_DEF(iemOp_loope_Jb)
12462{
12463 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
12464 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12465 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12466
12467 switch (pVCpu->iem.s.enmEffAddrMode)
12468 {
12469 case IEMMODE_16BIT:
12470 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12471 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12472 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12473 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12474 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12475 } IEM_MC_ELSE() {
12476 IEM_MC_ADVANCE_RIP_AND_FINISH();
12477 } IEM_MC_ENDIF();
12478 IEM_MC_END();
12479 break;
12480
12481 case IEMMODE_32BIT:
12482 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12484 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12485 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12486 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12487 } IEM_MC_ELSE() {
12488 IEM_MC_ADVANCE_RIP_AND_FINISH();
12489 } IEM_MC_ENDIF();
12490 IEM_MC_END();
12491 break;
12492
12493 case IEMMODE_64BIT:
12494 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12496 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12497 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
12498 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12499 } IEM_MC_ELSE() {
12500 IEM_MC_ADVANCE_RIP_AND_FINISH();
12501 } IEM_MC_ENDIF();
12502 IEM_MC_END();
12503 break;
12504
12505 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12506 }
12507}
12508
12509
12510/**
12511 * @opcode 0xe2
12512 */
12513FNIEMOP_DEF(iemOp_loop_Jb)
12514{
12515 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
12516 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12517 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12518
12519 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
12520 * using the 32-bit operand size override. How can that be restarted? See
12521 * weird pseudo code in intel manual. */
12522
12523 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
12524 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
12525 * the loop causes guest crashes, but when logging it's nice to skip a few million
12526 * lines of useless output. */
12527#if defined(LOG_ENABLED)
12528 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
12529 switch (pVCpu->iem.s.enmEffAddrMode)
12530 {
12531 case IEMMODE_16BIT:
12532 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12534 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
12535 IEM_MC_ADVANCE_RIP_AND_FINISH();
12536 IEM_MC_END();
12537 break;
12538
12539 case IEMMODE_32BIT:
12540 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12541 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12542 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
12543 IEM_MC_ADVANCE_RIP_AND_FINISH();
12544 IEM_MC_END();
12545 break;
12546
12547 case IEMMODE_64BIT:
12548 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12549 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12550 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
12551 IEM_MC_ADVANCE_RIP_AND_FINISH();
12552 IEM_MC_END();
12553 break;
12554
12555 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12556 }
12557#endif
12558
12559 switch (pVCpu->iem.s.enmEffAddrMode)
12560 {
12561 case IEMMODE_16BIT:
12562 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12564 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
12565 IEM_MC_IF_CX_IS_NZ() {
12566 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12567 } IEM_MC_ELSE() {
12568 IEM_MC_ADVANCE_RIP_AND_FINISH();
12569 } IEM_MC_ENDIF();
12570 IEM_MC_END();
12571 break;
12572
12573 case IEMMODE_32BIT:
12574 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12575 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12576 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
12577 IEM_MC_IF_ECX_IS_NZ() {
12578 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12579 } IEM_MC_ELSE() {
12580 IEM_MC_ADVANCE_RIP_AND_FINISH();
12581 } IEM_MC_ENDIF();
12582 IEM_MC_END();
12583 break;
12584
12585 case IEMMODE_64BIT:
12586 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12588 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
12589 IEM_MC_IF_RCX_IS_NZ() {
12590 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12591 } IEM_MC_ELSE() {
12592 IEM_MC_ADVANCE_RIP_AND_FINISH();
12593 } IEM_MC_ENDIF();
12594 IEM_MC_END();
12595 break;
12596
12597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12598 }
12599}
12600
12601
12602/**
12603 * @opcode 0xe3
12604 */
12605FNIEMOP_DEF(iemOp_jecxz_Jb)
12606{
12607 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
12608 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12609 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
12610
12611 switch (pVCpu->iem.s.enmEffAddrMode)
12612 {
12613 case IEMMODE_16BIT:
12614 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
12615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12616 IEM_MC_IF_CX_IS_NZ() {
12617 IEM_MC_ADVANCE_RIP_AND_FINISH();
12618 } IEM_MC_ELSE() {
12619 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12620 } IEM_MC_ENDIF();
12621 IEM_MC_END();
12622 break;
12623
12624 case IEMMODE_32BIT:
12625 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12627 IEM_MC_IF_ECX_IS_NZ() {
12628 IEM_MC_ADVANCE_RIP_AND_FINISH();
12629 } IEM_MC_ELSE() {
12630 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12631 } IEM_MC_ENDIF();
12632 IEM_MC_END();
12633 break;
12634
12635 case IEMMODE_64BIT:
12636 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
12637 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12638 IEM_MC_IF_RCX_IS_NZ() {
12639 IEM_MC_ADVANCE_RIP_AND_FINISH();
12640 } IEM_MC_ELSE() {
12641 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12642 } IEM_MC_ENDIF();
12643 IEM_MC_END();
12644 break;
12645
12646 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12647 }
12648}
12649
12650
/** Opcode 0xe4.  IN AL,Ib - read one byte from the immediate port into AL.
 * Deferred to the C implementation; the 0x80 flag marks the port as coming
 * from an immediate (see the fImm comment). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Second flag argument: guest registers modified (xAX) - used by the native recompiler. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12660
12661
/** Opcode 0xe5.  IN eAX,Ib - read a word/dword (per operand size) from the
 * immediate port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12672
12673
/** Opcode 0xe6.  OUT Ib,AL - write AL to the immediate port.
 * No guest registers are modified, hence the zero register mask. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12683
12684
/** Opcode 0xe7.  OUT Ib,eAX - write AX/EAX (per operand size) to the
 * immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12695
12696
12697/**
12698 * @opcode 0xe8
12699 */
12700FNIEMOP_DEF(iemOp_call_Jv)
12701{
12702 IEMOP_MNEMONIC(call_Jv, "call Jv");
12703 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12704 switch (pVCpu->iem.s.enmEffOpSize)
12705 {
12706 case IEMMODE_16BIT:
12707 {
12708 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
12709 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12710 iemCImpl_call_rel_16, (int16_t)u16Imm);
12711 }
12712
12713 case IEMMODE_32BIT:
12714 {
12715 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
12716 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12717 iemCImpl_call_rel_32, (int32_t)u32Imm);
12718 }
12719
12720 case IEMMODE_64BIT:
12721 {
12722 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
12723 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
12724 iemCImpl_call_rel_64, u64Imm);
12725 }
12726
12727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12728 }
12729}
12730
12731
12732/**
12733 * @opcode 0xe9
12734 */
12735FNIEMOP_DEF(iemOp_jmp_Jv)
12736{
12737 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
12738 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12739 switch (pVCpu->iem.s.enmEffOpSize)
12740 {
12741 case IEMMODE_16BIT:
12742 IEM_MC_BEGIN(0, 0, 0, 0);
12743 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
12744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12745 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
12746 IEM_MC_END();
12747 break;
12748
12749 case IEMMODE_64BIT:
12750 case IEMMODE_32BIT:
12751 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
12752 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
12753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12754 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
12755 IEM_MC_END();
12756 break;
12757
12758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12759 }
12760}
12761
12762
12763/**
12764 * @opcode 0xea
12765 */
12766FNIEMOP_DEF(iemOp_jmp_Ap)
12767{
12768 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
12769 IEMOP_HLP_NO_64BIT();
12770
12771 /* Decode the far pointer address and pass it on to the far call C implementation. */
12772 uint32_t off32Seg;
12773 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
12774 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
12775 else
12776 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
12777 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
12778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12779 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
12780 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
12781 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
12782 /** @todo make task-switches, ring-switches, ++ return non-zero status */
12783}
12784
12785
12786/**
12787 * @opcode 0xeb
12788 */
12789FNIEMOP_DEF(iemOp_jmp_Jb)
12790{
12791 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
12792 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
12793 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
12794
12795 IEM_MC_BEGIN(0, 0, 0, 0);
12796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12797 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
12798 IEM_MC_END();
12799}
12800
12801
/** Opcode 0xec.  IN AL,DX - read one byte from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Register mask: only xAX is written (used by the native recompiler). */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12811
12812
/** Opcode 0xed.  IN eAX,DX - read a word/dword (per operand size) from the
 * port in DX into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12823
12824
/** Opcode 0xee.  OUT DX,AL - write AL to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12833
12834
/** Opcode 0xef.  OUT DX,eAX - write AX/EAX (per operand size) to the port
 * in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12844
12845
12846/**
12847 * @opcode 0xf0
12848 */
12849FNIEMOP_DEF(iemOp_lock)
12850{
12851 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
12852 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
12853
12854 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12855 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12856}
12857
12858
12859/**
12860 * @opcode 0xf1
12861 */
12862FNIEMOP_DEF(iemOp_int1)
12863{
12864 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
12865 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
12866 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
12867 * LOADALL memo. Needs some testing. */
12868 IEMOP_HLP_MIN_386();
12869 /** @todo testcase! */
12870 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
12871 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
12872 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
12873}
12874
12875
12876/**
12877 * @opcode 0xf2
12878 */
12879FNIEMOP_DEF(iemOp_repne)
12880{
12881 /* This overrides any previous REPE prefix. */
12882 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
12883 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
12884 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
12885
12886 /* For the 4 entry opcode tables, REPNZ overrides any previous
12887 REPZ and operand size prefixes. */
12888 pVCpu->iem.s.idxPrefix = 3;
12889
12890 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12891 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12892}
12893
12894
12895/**
12896 * @opcode 0xf3
12897 */
12898FNIEMOP_DEF(iemOp_repe)
12899{
12900 /* This overrides any previous REPNE prefix. */
12901 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
12902 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
12903 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
12904
12905 /* For the 4 entry opcode tables, REPNZ overrides any previous
12906 REPNZ and operand size prefixes. */
12907 pVCpu->iem.s.idxPrefix = 2;
12908
12909 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
12910 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
12911}
12912
12913
12914/**
12915 * @opcode 0xf4
12916 */
12917FNIEMOP_DEF(iemOp_hlt)
12918{
12919 IEMOP_MNEMONIC(hlt, "hlt");
12920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12921 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
12922}
12923
12924
12925/**
12926 * @opcode 0xf5
12927 */
12928FNIEMOP_DEF(iemOp_cmc)
12929{
12930 IEMOP_MNEMONIC(cmc, "cmc");
12931 IEM_MC_BEGIN(0, 0, 0, 0);
12932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12933 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
12934 IEM_MC_ADVANCE_RIP_AND_FINISH();
12935 IEM_MC_END();
12936}
12937
12938
12939/**
12940 * Body for of 'inc/dec/not/neg Eb'.
12941 */
12942#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
12943 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
12944 { \
12945 /* register access */ \
12946 IEM_MC_BEGIN(2, 0, 0, 0); \
12947 IEMOP_HLP_DONE_DECODING(); \
12948 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12949 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
12950 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
12951 IEM_MC_REF_EFLAGS(pEFlags); \
12952 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12953 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12954 IEM_MC_END(); \
12955 } \
12956 else \
12957 { \
12958 /* memory access. */ \
12959 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
12960 { \
12961 IEM_MC_BEGIN(2, 2, 0, 0); \
12962 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12963 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12964 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12965 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12966 \
12967 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12968 IEMOP_HLP_DONE_DECODING(); \
12969 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12970 IEM_MC_FETCH_EFLAGS(EFlags); \
12971 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
12972 \
12973 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
12974 IEM_MC_COMMIT_EFLAGS(EFlags); \
12975 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12976 IEM_MC_END(); \
12977 } \
12978 else \
12979 { \
12980 IEM_MC_BEGIN(2, 2, 0, 0); \
12981 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
12982 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
12983 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
12984 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
12985 \
12986 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
12987 IEMOP_HLP_DONE_DECODING(); \
12988 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
12989 IEM_MC_FETCH_EFLAGS(EFlags); \
12990 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
12991 \
12992 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
12993 IEM_MC_COMMIT_EFLAGS(EFlags); \
12994 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
12995 IEM_MC_END(); \
12996 } \
12997 } \
12998 (void)0
12999
13000
13001/**
13002 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
13003 */
13004#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
13005 if (IEM_IS_MODRM_REG_MODE(bRm)) \
13006 { \
13007 /* \
13008 * Register target \
13009 */ \
13010 switch (pVCpu->iem.s.enmEffOpSize) \
13011 { \
13012 case IEMMODE_16BIT: \
13013 IEM_MC_BEGIN(2, 0, 0, 0); \
13014 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13015 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13016 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13017 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13018 IEM_MC_REF_EFLAGS(pEFlags); \
13019 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13020 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13021 IEM_MC_END(); \
13022 break; \
13023 \
13024 case IEMMODE_32BIT: \
13025 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13026 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13027 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13028 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13029 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13030 IEM_MC_REF_EFLAGS(pEFlags); \
13031 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13032 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13033 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13034 IEM_MC_END(); \
13035 break; \
13036 \
13037 case IEMMODE_64BIT: \
13038 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13040 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13041 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13042 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13043 IEM_MC_REF_EFLAGS(pEFlags); \
13044 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13045 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13046 IEM_MC_END(); \
13047 break; \
13048 \
13049 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13050 } \
13051 } \
13052 else \
13053 { \
13054 /* \
13055 * Memory target. \
13056 */ \
13057 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13058 { \
13059 switch (pVCpu->iem.s.enmEffOpSize) \
13060 { \
13061 case IEMMODE_16BIT: \
13062 IEM_MC_BEGIN(2, 3, 0, 0); \
13063 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13064 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13066 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13067 \
13068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13069 IEMOP_HLP_DONE_DECODING(); \
13070 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13071 IEM_MC_FETCH_EFLAGS(EFlags); \
13072 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13073 \
13074 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13075 IEM_MC_COMMIT_EFLAGS(EFlags); \
13076 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13077 IEM_MC_END(); \
13078 break; \
13079 \
13080 case IEMMODE_32BIT: \
13081 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13082 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13083 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13084 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13085 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13086 \
13087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13088 IEMOP_HLP_DONE_DECODING(); \
13089 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13090 IEM_MC_FETCH_EFLAGS(EFlags); \
13091 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13092 \
13093 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13094 IEM_MC_COMMIT_EFLAGS(EFlags); \
13095 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13096 IEM_MC_END(); \
13097 break; \
13098 \
13099 case IEMMODE_64BIT: \
13100 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13101 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13102 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13103 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13104 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13105 \
13106 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13107 IEMOP_HLP_DONE_DECODING(); \
13108 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13109 IEM_MC_FETCH_EFLAGS(EFlags); \
13110 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13111 \
13112 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13113 IEM_MC_COMMIT_EFLAGS(EFlags); \
13114 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13115 IEM_MC_END(); \
13116 break; \
13117 \
13118 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13119 } \
13120 } \
13121 else \
13122 { \
13123 (void)0
13124
/**
 * LOCK-prefixed memory-destination continuation of IEMOP_BODY_UNARY_Ev.
 * Must directly follow that macro: it supplies the body of the open
 * 'else {' scope (locked memory access) and closes the braces.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13190
13191
13192/**
13193 * @opmaps grp3_f6
13194 * @opcode /0
13195 * @todo also /1
13196 */
13197FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13198{
13199 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
13200 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13201
13202 if (IEM_IS_MODRM_REG_MODE(bRm))
13203 {
13204 /* register access */
13205 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13206 IEM_MC_BEGIN(3, 0, 0, 0);
13207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13208 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13209 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13210 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13211 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13212 IEM_MC_REF_EFLAGS(pEFlags);
13213 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13214 IEM_MC_ADVANCE_RIP_AND_FINISH();
13215 IEM_MC_END();
13216 }
13217 else
13218 {
13219 /* memory access. */
13220 IEM_MC_BEGIN(3, 3, 0, 0);
13221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13223
13224 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13226
13227 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13228 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
13229 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13230
13231 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
13232 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13233 IEM_MC_FETCH_EFLAGS(EFlags);
13234 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13235
13236 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
13237 IEM_MC_COMMIT_EFLAGS(EFlags);
13238 IEM_MC_ADVANCE_RIP_AND_FINISH();
13239 IEM_MC_END();
13240 }
13241}
13242
13243
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized MUL/IMUL/DIV/IDIV group-3 forms.  The
 * assembly worker operates on AX (implicit accumulator) and the 8-bit
 * operand; a non-zero return from the worker raises \#DE (divide error).
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit multiply/divide assembly worker to call.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* rc == 0 means success; otherwise raise #DE. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13294
13295
/**
 * Common body for the word/dword/qword MUL, IMUL, DIV and IDIV forms of
 * group 3, i.e. opcode 0xf7 /4, /5, /6 and /7.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 * @param   pImpl   Size-variant function table; each worker takes
 *                  rAX and rDX by reference (implicit operands), the source
 *                  value and EFLAGS.  A non-zero return requests \#DE.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                /* rc == 0 means success; anything else raises #DE. */
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* The workers write via pointer, so the usual implicit 32-bit
                       zero extension of RAX/RDX must be done here explicitly. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* Explicit zero extension of RAX/RDX, same as the register form. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13466
13467
/**
 * @opmaps grp3_f6
 * @opcode /2
 *
 * NOT r/m8 - one's complement negation; body (incl. LOCK handling) is shared
 * via the IEMOP_BODY_UNARY_Eb macro.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
13477
13478
13479/**
13480 * @opmaps grp3_f6
13481 * @opcode /3
13482 */
13483FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
13484{
13485 IEMOP_MNEMONIC(net_Eb, "neg Eb");
13486 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
13487}
13488
13489
/**
 * @opcode 0xf6
 *
 * Group 3 byte-sized dispatcher: fetches the ModR/M byte and dispatches on
 * the /reg field.  /0 and /1 are both TEST (the /1 alias behaviour matches
 * real hardware); /4../7 share a common mul/div body parameterized by the
 * EFLAGS-behaviour-selected worker table.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13521
13522
/**
 * Opcode 0xf7 /0 - TEST Ev,Iz.
 *
 * ANDs the r/m operand with an immediate, updating only EFLAGS (no write-back,
 * hence read-only memory mapping below).  In 64-bit mode the immediate is a
 * sign-extended 32-bit value, per the usual Iz convention.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64 bits */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* 3rd param = immediate bytes still to be fetched (for RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t,  u16Src,  u16Imm,    1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t,  u32Src,  u32Imm,    1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* still a 4-byte immediate in 64-bit mode */

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t,  u64Src,  u64Imm,    1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13658
13659
/** Opcode 0xf7 /2 - NOT Ev.  Body (plain + LOCK variants) shared via macros. */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13667
13668
/** Opcode 0xf7 /3 - NEG Ev.  Body (plain + LOCK variants) shared via macros. */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13676
13677
/**
 * @opcode 0xf7
 *
 * Group 3 word/dword/qword dispatcher; mirrors iemOp_Grp3_Eb.  /0 and /1 are
 * both TEST, /4../7 share the common mul/div body.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13709
13710
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13723
13724
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13737
13738
/**
 * @opcode 0xfa
 *
 * CLI - deferred to a C implementation (needs IOPL/VME checks and can VM-exit).
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
13748
13749
/**
 * @opcode 0xfb
 *
 * STI - deferred to a C implementation; sets up the one-instruction interrupt
 * inhibit shadow and checks for pending IRQs afterwards.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
13757
13758
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13771
13772
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13785
13786
/**
 * @opmaps grp4
 * @opcode /0
 *
 * INC r/m8; body (incl. LOCK handling) shared via IEMOP_BODY_UNARY_Eb.
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13796
13797
/**
 * @opmaps grp4
 * @opcode /1
 *
 * DEC r/m8; body (incl. LOCK handling) shared via IEMOP_BODY_UNARY_Eb.
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13807
13808
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (INC Eb) and /1 (DEC Eb) are defined; the
 * remaining encodings raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13825
/** Opcode 0xff /0 - INC Ev.  Body (plain + LOCK variants) shared via macros. */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13833
13834
/** Opcode 0xff /1 - DEC Ev.  Body (plain + LOCK variants) shared via macros. */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13842
13843
/**
 * Opcode 0xff /2 - CALL Ev (near, indirect).
 *
 * The target RIP comes either from a register or from memory; the actual
 * push-return-address-and-branch work is done by the iemCImpl_call_XX
 * C implementations.  In 64-bit mode the operand size defaults to 64-bit
 * (Intel also ignores the operand-size prefix here).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13930
/**
 * Common body for the far indirect CALL and JMP forms of group 5 - the "Ep"
 * operand (0xff /3 and /5).
 *
 * Register operands are invalid here and raise \#UD.  A 16-bit selector and a
 * 16/32/64-bit offset are loaded from memory (offset first, then selector at
 * displacement cbOp) and handed to the given C implementation.  In 64-bit
 * mode the default operand size is 32-bit; only Intel honours REX.W - see
 * the AMD assertion in the 64-bit case.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       The C implementation (iemCImpl_callf / iemCImpl_FarJmp).
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags (e.g.
 *                          IEM_CIMPL_F_BRANCH_STACK for the call form).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
13999
14000
/**
 * Opcode 0xff /3 - CALLF Ep (far, indirect via memory).
 *
 * Shares the far-pointer loading body with jmpf; the extra branch-stack flag
 * covers the return address push done by iemCImpl_callf.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
14010
14011
/**
 * Opcode 0xff /4 - JMP Ev (near, indirect).
 *
 * Loads the new RIP from a register or memory and branches directly via the
 * SET_RIP microcode ops (no C implementation needed, unlike calln).  64-bit
 * mode defaults to 64-bit operand size; Intel also ignores the operand-size
 * prefix here.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14098
14099
/**
 * Opcode 0xff /5 - JMPF Ep (far, indirect via memory).
 *
 * Shares the far-pointer loading body with callf; no extra flags since a far
 * jump does not touch the stack.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
14109
14110
/**
 * Opcode 0xff /6 - PUSH Ev.
 *
 * Register operands are delegated to the common push-GReg worker; the memory
 * forms are handled inline (fetch from memory, push).  Note the 32-bit form
 * is flagged NOT_64BIT - in 64-bit mode pushes are 16 or 64 bits only.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t,  u16Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t,  u32Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t,  u64Src);
            IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14166
14167
/**
 * @opcode 0xff
 *
 * Group 5 dispatcher on the ModR/M /reg field: INC, DEC, near/far CALL,
 * near/far JMP and PUSH; /7 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
        case 1:
            return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
    AssertFailedReturn(VERR_IEM_IPE_3); /* all 3-bit /reg values handled above */
}
14196
14197
14198
/**
 * The one-byte opcode dispatch table, indexed by the opcode byte (0x00..0xff).
 *
 * Each entry is the FNIEMOP decoder/emulator for that opcode.  Group opcodes
 * (0x80..0x83, 0xc0/0xc1, 0xc6/0xc7, 0xd0..0xd3, 0xf6/0xf7, 0xfe, 0xff) point
 * to secondary dispatchers (iemOp_GrpN*) that further decode the ModR/M reg
 * field; 0x0f escapes to the two-byte opcode map (iemOp_2byteEscape).
 * Non-static so it can be forward declared at the top of this file.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
14266
14267
14268/** @} */
14269
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette