VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103192

Last change on this file since 103192 was 103192, checked in by vboxsync, 10 months ago

fix for r161455, missing break? bugref:10372

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 552.4 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 103192 2024-02-05 05:42:19Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte; for a register destination it invokes
 * @a a_fnNormalU8 directly on the register.  For a memory destination it maps
 * the byte read/write and uses @a a_fnNormalU8, or - when a LOCK prefix is
 * present and not disregarded via IEM_F_X86_DISREGARD_LOCK - maps it
 * atomically and uses @a a_fnLockedU8.  EFlags are fetched into a local and
 * committed only after the memory commit succeeds.
 *
 * @param   a_fnNormalU8    Worker (pu8Dst, u8Src, pEFlags) for the plain case.
 * @param   a_fnLockedU8    Worker for the LOCKed (atomic) memory case.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
136
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands.
 *
 * Like IEMOP_BODY_BINARY_rm_r8_RW, but the r/m operand is only read: the
 * memory mapping is read-only and a LOCK prefix raises an invalid-lock-prefix
 * exception instead of selecting a locked worker.
 *
 * @param   a_fnNormalU8    Worker (pu8Dst, u8Src, pEFlags); must not write
 *                          through pu8Dst (it is mapped const here).
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,          0); \
            IEM_MC_ARG(uint8_t,         u8Src,           1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK is not allowed on instructions that don't write the r/m operand. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
198
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * The reg field is the destination and the r/m field the source, so the
 * memory form only needs a plain fetch (no mapping/commit) and a LOCK prefix
 * is rejected in both forms.
 *
 * @param   a_fnNormalU8    Worker (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
247
248
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Switches on the effective operand size for both the register and the
 * non-LOCKed memory forms.  Note! This macro is deliberately unterminated:
 * it ends inside the locked-memory 'else' branch and MUST be followed by
 * IEMOP_BODY_BINARY_rm_rv_LOCKED, which supplies the locked cases and the
 * closing braces (split to work around an IEMAllInstPython.py parsing issue,
 * see below).
 *
 * @param   a_fnNormalU16   Worker for 16-bit operands.
 * @param   a_fnNormalU32   Worker for 32-bit operands.
 * @param   a_fnNormalU64   Worker for 64-bit operands.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Locked-memory continuation of IEMOP_BODY_BINARY_rm_rv_RW; must directly
 * follow that macro, as it supplies the switch for the LOCKed case and the
 * two closing braces left open there.
 *
 * @param   a_fnLockedU16   Locked (atomic) worker for 16-bit operands.
 * @param   a_fnLockedU32   Locked (atomic) worker for 32-bit operands.
 * @param   a_fnLockedU64   Locked (atomic) worker for 64-bit operands.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo /* CMP,TEST */); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
467
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * Same shape as IEMOP_BODY_BINARY_rm_rv_RW but the r/m operand is mapped
 * read-only (const pointers, *_RO map/unmap) and a LOCK prefix raises an
 * invalid-lock-prefix exception.  This macro is self-terminating - no
 * _LOCKED continuation is needed.
 *
 * @param   a_fnNormalU16   Worker for 16-bit operands (must not write dst).
 * @param   a_fnNormalU32   Worker for 32-bit operands (must not write dst).
 * @param   a_fnNormalU64   Worker for 64-bit operands (must not write dst).
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,         u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,         u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,         u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK is not allowed on read-only destinations. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
617
618
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the Ib immediate and applies @a a_fnNormalU8 to AL.  Note that,
 * unlike the other bodies, this one ends on IEM_MC_END() without a trailing
 * (void)0, so the invoking opcode function supplies the final semicolon.
 *
 * @param   a_fnNormalU8    Worker (pu8Dst, u8Src, pEFlags).
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
638
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * The 64-bit immediate is the sign-extended Iz (32-bit) value, per the usual
 * x86 encoding.  The high dword of RAX is cleared after a 32-bit operation
 * only when @a a_fModifiesDstReg is set (TEST/CMP pass 0).
 *
 * NOTE(review): the 16-bit and 32-bit cases have no 'break' after
 * IEM_MC_END(); presumably unreachable fall-through because
 * IEM_MC_ADVANCE_RIP_AND_FINISH() leaves the function, but this is exactly
 * what the r103192 checkin message ("missing break? bugref:10372")
 * questions - confirm against the IEM_MC_* expansions.
 *
 * @param   a_fnNormalU16       Worker for 16-bit operands.
 * @param   a_fnNormalU32       Worker for 32-bit operands.
 * @param   a_fnNormalU64       Worker for 64-bit operands.
 * @param   a_fModifiesDstReg   Non-zero if the worker writes the destination.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
705
706
707
708/* Instruction specification format - work in progress: */
709
710/**
711 * @opcode 0x00
712 * @opmnemonic add
713 * @op1 rm:Eb
714 * @op2 reg:Gb
715 * @opmaps one
716 * @openc ModR/M
717 * @opflclass arithmetic
718 * @ophints harmless ignores_op_sizes
719 * @opstats add_Eb_Gb
720 * @opgroup og_gen_arith_bin
721 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
722 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
723 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
724 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
725 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Eb += Gb; worker body covers register, memory and LOCKed memory forms. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked);
}
731
732
733/**
734 * @opcode 0x01
735 * @opgroup og_gen_arith_bin
736 * @opflclass arithmetic
737 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
738 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
739 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
740 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
741 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Ev += Gv; the _RW body is unterminated by design and is completed by the _LOCKED body. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
748
749
750/**
751 * @opcode 0x02
752 * @opgroup og_gen_arith_bin
753 * @opflclass arithmetic
754 * @opcopytests iemOp_add_Eb_Gb
755 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Gb += Eb; register destination, so no LOCK form. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
761
762
763/**
764 * @opcode 0x03
765 * @opgroup og_gen_arith_bin
766 * @opflclass arithmetic
767 * @opcopytests iemOp_add_Ev_Gv
768 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    /* Gv += Ev; trailing 1,0 flags are IEMOP_BODY_BINARY_rv_rm parameters (defined elsewhere). */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
}
774
775
776/**
777 * @opcode 0x04
778 * @opgroup og_gen_arith_bin
779 * @opflclass arithmetic
780 * @opcopytests iemOp_add_Eb_Gb
781 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* AL += Ib; the AL_Ib body has no trailing (void)0, the ';' here completes it. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
787
788
789/**
790 * @opcode 0x05
791 * @opgroup og_gen_arith_bin
792 * @opflclass arithmetic
793 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
794 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
795 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
796 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
797 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* rAX += Iz; final 1 = a_fModifiesDstReg (ADD writes the destination). */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
803
804
805/**
806 * @opcode 0x06
807 * @opgroup og_stack_sreg
808 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* opcode 0x06 is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
815
816
817/**
818 * @opcode 0x07
819 * @opgroup og_stack_sreg
820 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* opcode 0x07 is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; the mask lists the guest registers it
       may modify: rSP plus all the shadowed ES segment register fields. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                  RT_BIT_64(kIemNativeGstReg_GprFirst       + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
834
835
836/**
837 * @opcode 0x08
838 * @opgroup og_gen_arith_bin
839 * @opflclass logical
840 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
841 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
842 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
843 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
844 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked);
}
851
852
/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflclass logical
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
872
873
874/**
875 * @opcode 0x0a
876 * @opgroup og_gen_arith_bin
877 * @opflclass logical
878 * @opcopytests iemOp_or_Eb_Gb
879 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
886
887
888/**
889 * @opcode 0x0b
890 * @opgroup og_gen_arith_bin
891 * @opflclass logical
892 * @opcopytests iemOp_or_Ev_Gv
893 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
}
900
901
902/**
903 * @opcode 0x0c
904 * @opgroup og_gen_arith_bin
905 * @opflclass logical
906 * @opcopytests iemOp_or_Eb_Gb
907 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
914
915
916/**
917 * @opcode 0x0d
918 * @opgroup og_gen_arith_bin
919 * @opflclass logical
920 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
921 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
922 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
923 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
924 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
925 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
926 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
927 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after OR */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
934
935
936/**
937 * @opcode 0x0e
938 * @opgroup og_stack_sreg
939 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* opcode 0x0e is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
946
947
948/**
949 * @opcode 0x0f
950 * @opmnemonic EscTwo0f
951 * @openc two0f
952 * @opdisenum OP_2B_ESC
953 * @ophints harmless
954 * @opgroup og_escapes
955 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    /* On 286 and later, 0x0f escapes to the two-byte opcode map.  The table is
       indexed by opcode byte times four plus the mandatory-prefix group index
       (idxPrefix: none/66/F3/F2). */
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
993
994/**
995 * @opcode 0x10
996 * @opgroup og_gen_arith_bin
997 * @opflclass arithmetic_carry
998 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
999 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1000 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1001 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1002 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1003 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: ModRM destination (reg or mem); second worker handles the
       LOCK-prefixed memory case. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked);
}
1009
1010
1011/**
1012 * @opcode 0x11
1013 * @opgroup og_gen_arith_bin
1014 * @opflclass arithmetic_carry
1015 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1016 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1017 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1018 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1019 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1020 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: ModRM destination; plain workers plus locked variants for
       LOCK-prefixed memory destinations. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1027
1028
1029/**
1030 * @opcode 0x12
1031 * @opgroup og_gen_arith_bin
1032 * @opflclass arithmetic_carry
1033 * @opcopytests iemOp_adc_Eb_Gb
1034 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: register destination, so no locked variant. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1040
1041
1042/**
1043 * @opcode 0x13
1044 * @opgroup og_gen_arith_bin
1045 * @opflclass arithmetic_carry
1046 * @opcopytests iemOp_adc_Ev_Gv
1047 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: register destination form for 16/32/64-bit operands. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1053
1054
1055/**
1056 * @opcode 0x14
1057 * @opgroup og_gen_arith_bin
1058 * @opflclass arithmetic_carry
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: fixed accumulator byte form. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1066
1067
1068/**
1069 * @opcode 0x15
1070 * @opgroup og_gen_arith_bin
1071 * @opflclass arithmetic_carry
1072 * @opcopytests iemOp_adc_Ev_Gv
1073 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: accumulator form with word/dword immediate. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1079
1080
1081/**
1082 * @opcode 0x16
1083 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode; defers to the common push-sreg worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1090
1091
1092/**
1093 * @opcode 0x17
1094 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: invalid in 64-bit mode.  Deferred to a C implementation; the
       IEM_CIMPL_F_INHIBIT_SHADOW flag reflects the one-instruction interrupt
       inhibition that follows a POP SS.  The register mask lists everything the
       cimpl may modify: xSP plus the full hidden SS descriptor state. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1108
1109
1110/**
1111 * @opcode 0x18
1112 * @opgroup og_gen_arith_bin
1113 * @opflclass arithmetic_carry
1114 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: ModRM destination; locked worker for LOCK-prefixed memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked);
}
1120
1121
1122/**
1123 * @opcode 0x19
1124 * @opgroup og_gen_arith_bin
1125 * @opflclass arithmetic_carry
1126 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: ModRM destination; plain plus locked worker sets. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1133
1134
1135/**
1136 * @opcode 0x1a
1137 * @opgroup og_gen_arith_bin
1138 * @opflclass arithmetic_carry
1139 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: register destination, no locked variant. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1145
1146
1147/**
1148 * @opcode 0x1b
1149 * @opgroup og_gen_arith_bin
1150 * @opflclass arithmetic_carry
1151 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: register destination form for 16/32/64-bit operands. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1157
1158
1159/**
1160 * @opcode 0x1c
1161 * @opgroup og_gen_arith_bin
1162 * @opflclass arithmetic_carry
1163 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: fixed accumulator byte form. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1169
1170
1171/**
1172 * @opcode 0x1d
1173 * @opgroup og_gen_arith_bin
1174 * @opflclass arithmetic_carry
1175 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: accumulator form with word/dword immediate. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1181
1182
1183/**
1184 * @opcode 0x1e
1185 * @opgroup og_stack_sreg
1186 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode; defers to the common push-sreg worker. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1193
1194
1195/**
1196 * @opcode 0x1f
1197 * @opgroup og_stack_sreg
1198 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: invalid in 64-bit mode.  Deferred to a C implementation; the
       register mask lists xSP plus the full hidden DS descriptor state that
       iemCImpl_pop_Sreg may modify.  Unlike POP SS, no interrupt-shadow flag
       is needed. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1212
1213
1214/**
1215 * @opcode 0x20
1216 * @opgroup og_gen_arith_bin
1217 * @opflclass logical
1218 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: ModRM destination; locked worker for LOCK-prefixed memory
       form.  AF is left undefined by AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked);
}
1225
1226
1227/**
1228 * @opcode 0x21
1229 * @opgroup og_gen_arith_bin
1230 * @opflclass logical
1231 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: ModRM destination; plain plus locked worker sets.  AF is left
       undefined by AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1239
1240
1241/**
1242 * @opcode 0x22
1243 * @opgroup og_gen_arith_bin
1244 * @opflclass logical
1245 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: register destination, no locked variant.  AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1252
1253
1254/**
1255 * @opcode 0x23
1256 * @opgroup og_gen_arith_bin
1257 * @opflclass logical
1258 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: register destination form.  AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1265
1266
1267/**
1268 * @opcode 0x24
1269 * @opgroup og_gen_arith_bin
1270 * @opflclass logical
1271 */
1272FNIEMOP_DEF(iemOp_and_Al_Ib)
1273{
1274 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1275 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1276 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1277}
1278
1279
1280/**
1281 * @opcode 0x25
1282 * @opgroup og_gen_arith_bin
1283 * @opflclass logical
1284 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: accumulator form with word/dword immediate.  AF undefined. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1291
1292
1293/**
1294 * @opcode 0x26
1295 * @opmnemonic SEG
1296 * @op1 ES
1297 * @opgroup og_prefix
1298 * @openc prefix
1299 * @opdisenum OP_SEG
1300 * @ophints harmless
1301 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it, then decode the next opcode byte
       via the one-byte dispatch table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1311
1312
1313/**
1314 * @opcode 0x27
1315 * @opfltest af,cf
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef of
1318 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal-adjust AL after addition; invalid in 64-bit mode.
       Deferred to a C implementation; only rAX (AL) and the status flags are
       touched.  OF is architecturally undefined. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1327
1328
1329/**
1330 * @opcode 0x28
1331 * @opgroup og_gen_arith_bin
1332 * @opflclass arithmetic
1333 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: ModRM destination; locked worker for LOCK-prefixed memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked);
}
1339
1340
1341/**
1342 * @opcode 0x29
1343 * @opgroup og_gen_arith_bin
1344 * @opflclass arithmetic
1345 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: ModRM destination; plain plus locked worker sets. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1352
1353
1354/**
1355 * @opcode 0x2a
1356 * @opgroup og_gen_arith_bin
1357 * @opflclass arithmetic
1358 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb: register destination, no locked variant. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1364
1365
1366/**
1367 * @opcode 0x2b
1368 * @opgroup og_gen_arith_bin
1369 * @opflclass arithmetic
1370 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev: register destination form for 16/32/64-bit operands. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1376
1377
1378/**
1379 * @opcode 0x2c
1380 * @opgroup og_gen_arith_bin
1381 * @opflclass arithmetic
1382 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib: fixed accumulator byte form. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1388
1389
1390/**
1391 * @opcode 0x2d
1392 * @opgroup og_gen_arith_bin
1393 * @opflclass arithmetic
1394 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz: accumulator form with word/dword immediate. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1400
1401
1402/**
1403 * @opcode 0x2e
1404 * @opmnemonic SEG
1405 * @op1 CS
1406 * @opgroup og_prefix
1407 * @openc prefix
1408 * @opdisenum OP_SEG
1409 * @ophints harmless
1410 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it, then decode the next opcode byte
       via the one-byte dispatch table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1420
1421
1422/**
1423 * @opcode 0x2f
1424 * @opfltest af,cf
1425 * @opflmodify cf,pf,af,zf,sf,of
1426 * @opflundef of
1427 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decimal-adjust AL after subtraction; invalid in 64-bit mode.
       Deferred to a C implementation; only rAX (AL) and the status flags are
       touched.  OF is architecturally undefined. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1436
1437
1438/**
1439 * @opcode 0x30
1440 * @opgroup og_gen_arith_bin
1441 * @opflclass logical
1442 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb: ModRM destination; locked worker for LOCK-prefixed memory
       form.  AF is left undefined by XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked);
}
1449
1450
1451/**
1452 * @opcode 0x31
1453 * @opgroup og_gen_arith_bin
1454 * @opflclass logical
1455 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv: ModRM destination; plain plus locked worker sets.  AF undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1463
1464
1465/**
1466 * @opcode 0x32
1467 * @opgroup og_gen_arith_bin
1468 * @opflclass logical
1469 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb: register destination, no locked variant.  AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1476
1477
1478/**
1479 * @opcode 0x33
1480 * @opgroup og_gen_arith_bin
1481 * @opflclass logical
1482 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev: register destination form.  AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1489
1490
1491/**
1492 * @opcode 0x34
1493 * @opgroup og_gen_arith_bin
1494 * @opflclass logical
1495 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib: fixed accumulator byte form.  AF undefined. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1502
1503
1504/**
1505 * @opcode 0x35
1506 * @opgroup og_gen_arith_bin
1507 * @opflclass logical
1508 */
1509FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1510{
1511 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1512 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1513 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1514}
1515
1516
1517/**
1518 * @opcode 0x36
1519 * @opmnemonic SEG
1520 * @op1 SS
1521 * @opgroup og_prefix
1522 * @openc prefix
1523 * @opdisenum OP_SEG
1524 * @ophints harmless
1525 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it, then decode the next opcode byte
       via the one-byte dispatch table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1535
1536
1537/**
1538 * @opcode 0x37
1539 * @opfltest af
1540 * @opflmodify cf,pf,af,zf,sf,of
1541 * @opflundef pf,zf,sf,of
1542 * @opgroup og_gen_arith_dec
1543 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1544 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1545 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1546 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1547 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1548 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1549 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1550 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1551 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1552 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1553 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1554 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1555 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1556 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1557 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1558 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1559 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1560 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1561 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1562 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1563 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1564 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1565 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1566 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1567 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1568 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1569 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1570 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1571 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1572 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1573 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1574 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: ASCII-adjust AL after addition; invalid in 64-bit mode.
       Deferred to a C implementation; only rAX (AL/AH) and the status flags
       are touched.  OF is one of the architecturally undefined flags (see the
       @opflundef list above for the full set). */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1584
1585
1586/**
1587 * @opcode 0x38
1588 * @opflclass arithmetic
1589 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb: read-only ModRM operand (flags only), so no locked variant. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
}
1595
1596
1597/**
1598 * @opcode 0x39
1599 * @opflclass arithmetic
1600 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv: read-only ModRM operand (flags only), so no locked variant. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1606
1607
1608/**
1609 * @opcode 0x3a
1610 * @opflclass arithmetic
1611 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb: register first operand, flags-only result. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1617
1618
1619/**
1620 * @opcode 0x3b
1621 * @opflclass arithmetic
1622 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev: flags-only; note the first trailing arg is 0 here while the
       destination-writing ops (or/adc/...) pass 1 -- presumably the read-only
       marker of IEMOP_BODY_BINARY_rv_rm; see the macro definition. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1628
1629
1630/**
1631 * @opcode 0x3c
1632 * @opflclass arithmetic
1633 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib: fixed accumulator byte form, flags-only result. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1639
1640
1641/**
1642 * @opcode 0x3d
1643 * @opflclass arithmetic
1644 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz: accumulator immediate form, flags-only (trailing 0 vs the
       1 that writing ops pass -- see IEMOP_BODY_BINARY_rAX_Iz). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1650
1651
1652/**
1653 * @opcode 0x3e
1654 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it, then decode the next opcode byte
       via the one-byte dispatch table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1664
1665
1666/**
1667 * @opcode 0x3f
1668 * @opfltest af
1669 * @opflmodify cf,pf,af,zf,sf,of
1670 * @opflundef pf,zf,sf,of
1671 * @opgroup og_gen_arith_dec
1672 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1673 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1674 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1675 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1676 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1677 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1678 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1679 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1680 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1681 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1682 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1683 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1684 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1685 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1686 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1687 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1688 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1689 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1690 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1691 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1692 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1693 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1694 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1695 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1696 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1697 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1698 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1699 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1700 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1701 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1702 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1703 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1704 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1705 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1706 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1707 */
1708FNIEMOP_DEF(iemOp_aas)
1709{
1710 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1711 IEMOP_HLP_NO_64BIT();
1712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1713 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1714
1715 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1716}
1717
1718
/**
 * Common 'inc/dec register' helper.
 *
 * Not for 64-bit code, only for what became the rex prefixes.
 *
 * Emits the 16-bit and 32-bit operand-size cases (IEM_MC_F_NOT_64BIT on
 * both); any other effective operand size hits the unreachable default.
 * The worker is called with a pointer to the register and a pointer to
 * EFLAGS; the 32-bit case also clears the high qword of the register.
 *
 * @param   a_fnNormalU16   16-bit worker (e.g. iemAImpl_inc_u16).
 * @param   a_fnNormalU32   32-bit worker (e.g. iemAImpl_inc_u32).
 * @param   a_iReg          The general register index (X86_GREG_xXX).
 */
#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
            IEM_MC_ARG(uint32_t *, pEFlags, 1); \
            IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
            IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
            break; \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
1754
1755/**
1756 * @opcode 0x40
1757 * @opflclass incdec
1758 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC eAX. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1776
1777
1778/**
1779 * @opcode 0x41
1780 * @opflclass incdec
1781 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX.B prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* bit 3, presumably OR'ed into register indices during decode */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC eCX. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1800
1801
1802/**
1803 * @opcode 0x42
1804 * @opflclass incdec
1805 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX.X prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* bit 3 for the SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC eDX. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1824
1825
1826
1827/**
1828 * @opcode 0x43
1829 * @opflclass incdec
1830 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX.BX (REX.B + REX.X) prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC eBX. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1850
1851
1852/**
1853 * @opcode 0x44
1854 * @opflclass incdec
1855 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX.R prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* bit 3 for the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC eSP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1874
1875
1876/**
1877 * @opcode 0x45
1878 * @opflclass incdec
1879 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX.RB prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC eBP. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1899
1900
1901/**
1902 * @opcode 0x46
1903 * @opflclass incdec
1904 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX.RX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC eSI. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1924
1925
1926/**
1927 * @opcode 0x47
1928 * @opflclass incdec
1929 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX.RXB prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC eDI. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
1950
1951
/**
 * @opcode 0x48
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode: record REX.W and decode the next
     * byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);   /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eAX. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
1974
1975
/**
 * @opcode 0x49
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode: record REX.B and REX.W and decode
     * the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;    /* REX.B: bit 3 of the ModRM.rm/base/opcode-reg index. */
        iemRecalEffOpSize(pVCpu);       /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eCX. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
1999
2000
/**
 * @opcode 0x4a
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode: record REX.X and REX.W and decode
     * the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB.index register index. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eDX. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2024
2025
/**
 * @opcode 0x4b
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode: record REX.B, REX.X and REX.W and
     * decode the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: bit 3 of the ModRM.rm/base/opcode-reg index. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB.index register index. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eBX. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2050
2051
/**
 * @opcode 0x4c
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode: record REX.R and REX.W and decode
     * the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;  /* REX.R: bit 3 of the ModRM.reg register index. */
        iemRecalEffOpSize(pVCpu);       /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eSP. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2075
2076
/**
 * @opcode 0x4d
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode: record REX.R, REX.B and REX.W and
     * decode the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;  /* REX.R: bit 3 of the ModRM.reg register index. */
        pVCpu->iem.s.uRexB   = 1 << 3;  /* REX.B: bit 3 of the ModRM.rm/base/opcode-reg index. */
        iemRecalEffOpSize(pVCpu);       /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eBP. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2101
2102
/**
 * @opcode 0x4e
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode: record REX.R, REX.X and REX.W and
     * decode the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: bit 3 of the ModRM.reg register index. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB.index register index. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eSI. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2127
2128
/**
 * @opcode 0x4f
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode: record all four REX bits (R, B, X
     * and W) and decode the next byte as the actual opcode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;    /* REX.R: bit 3 of the ModRM.reg register index. */
        pVCpu->iem.s.uRexB     = 1 << 3;    /* REX.B: bit 3 of the ModRM.rm/base/opcode-reg index. */
        pVCpu->iem.s.uRexIndex = 1 << 3;    /* REX.X: bit 3 of the SIB.index register index. */
        iemRecalEffOpSize(pVCpu);           /* REX.W changes the effective operand size. */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: plain DEC eDI. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2154
2155
/**
 * Common 'push register' helper.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); extended with
 *                  the REX.B bit in 64-bit mode below.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;     /* REX.B extends the register index (r8-r15). */
        /* In 64-bit mode PUSH defaults to a 64-bit operand; 0x66 selects 16-bit,
           32-bit is not encodable. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Emit the micro-op block for the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2203
2204
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* All the work is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2213
2214
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* All the work is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2223
2224
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* All the work is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2233
2234
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* All the work is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2243
2244
/**
 * @opcode 0x54
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later:
       it pushes the value of SP *after* the decrement, i.e. SP - 2. */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2264
2265
/**
 * @opcode 0x55
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* All the work is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2274
2275
/**
 * @opcode 0x56
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* All the work is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2284
2285
/**
 * @opcode 0x57
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* All the work is done by the common push-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2294
2295
/**
 * Common 'pop register' helper.
 *
 * @param   iReg    The general register index (X86_GREG_XXX); extended with
 *                  the REX.B bit in 64-bit mode below.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;     /* REX.B extends the register index (r8-r15). */
        /* In 64-bit mode POP defaults to a 64-bit operand; 0x66 selects 16-bit,
           32-bit is not encodable. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /* Emit the micro-op block for the effective operand size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2337
2338
/**
 * @opcode 0x58
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* All the work is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2347
2348
/**
 * @opcode 0x59
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* All the work is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2357
2358
/**
 * @opcode 0x5a
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* All the work is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2367
2368
/**
 * @opcode 0x5b
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* All the work is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2377
2378
/**
 * @opcode 0x5c
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    /* All the work is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2387
2388
/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* All the work is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2397
2398
/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* All the work is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2407
2408
/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* All the work is done by the common pop-register worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2417
2418
/**
 * @opcode 0x60
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();            /* PUSHA was introduced with the 80186. */
    IEMOP_HLP_NO_64BIT();           /* Invalid in 64-bit mode. */
    /* Deferred to a C implementation; only xSP is modified (the pushed GPRs
       themselves are unchanged), hence the single-register clobber mask. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2432
2433
2434/**
2435 * @opcode 0x61
2436 */
2437FNIEMOP_DEF(iemOp_popa__mvex)
2438{
2439 if (!IEM_IS_64BIT_CODE(pVCpu))
2440 {
2441 IEMOP_MNEMONIC(popa, "popa");
2442 IEMOP_HLP_MIN_186();
2443 IEMOP_HLP_NO_64BIT();
2444 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
2445 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2446 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2447 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2448 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2449 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2450 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2451 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2452 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2453 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2454 iemCImpl_popa_16);
2455 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
2456 IEM_MC_DEFER_TO_CIMPL_0_RET(0,
2457 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
2458 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
2459 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
2460 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
2461 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
2462 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
2463 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
2464 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
2465 iemCImpl_popa_32);
2466 }
2467 IEMOP_MNEMONIC(mvex, "mvex");
2468 Log(("mvex prefix is not supported!\n"));
2469 IEMOP_RAISE_INVALID_OPCODE_RET();
2470}
2471
2472
2473/**
2474 * @opcode 0x62
2475 * @opmnemonic bound
2476 * @op1 Gv_RO
2477 * @op2 Ma
2478 * @opmincpu 80186
2479 * @ophints harmless x86_invalid_64
2480 * @optest op1=0 op2=0 ->
2481 * @optest op1=1 op2=0 -> value.xcpt=5
2482 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2483 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2484 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2485 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2486 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2487 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2488 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2489 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2490 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2491 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2492 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2493 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2494 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2495 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2496 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2497 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2498 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2499 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2500 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2501 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2502 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2503 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2504 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2505 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2506 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2507 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2508 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2509 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2510 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2511 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2512 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2513 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2514 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2515 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2516 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2517 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2518 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2519 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2520 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2521 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2522 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2523 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2524 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();        /* BOUND was introduced with the 80186. */
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at m16, upper bound at m16+2. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at m32, upper bound at m32+4. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix path: consume the remaining two payload bytes, then give up. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2612
2613
/**
 * @opcode 0x63
 * @opflmodify zf
 * @note non-64-bit modes.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();                /* ARPL requires a 286 or later. */
    IEMOP_HLP_NO_REAL_OR_V86_MODE();    /* Protected mode only. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: reference it directly and let the assembly
           worker adjust the RPL and set ZF. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, apply the worker, then
           commit both the memory and the flags. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2666
2667
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source register
             * into the 64-bit destination.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory: sign-extend the 32-bit
             * memory operand into the 64-bit destination register.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* The no-REX.W (plain 32/16-bit move) variant is not implemented yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2715
2716
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();    /* The FS segment register is 386+. */

    /* Record the segment override and continue decoding the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2734
2735
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();    /* The GS segment register is 386+. */

    /* Record the segment override and continue decoding the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2753
2754
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and recalculate the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2779
2780
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and flip the effective address size: the prefix
       selects the non-default size for the current mode (64-bit mode can
       only drop to 32-bit, never to 16-bit). */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2806
2807
/**
 * @opcode 0x68
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();                /* PUSH imm was introduced with the 80186. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();  /* Defaults to 64-bit operand size in long mode. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* The immediate is still 32 bits, sign-extended to 64. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2851
2852
/**
 * @opcode 0x69
 * @opflclass multiply
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();    /* Three-operand IMUL was introduced with the 80186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            /* Pick the eflags-behavior-specific worker for the host/guest CPU. */
            PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the immediate follows the ModRM bytes (disp hint 2). */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand; the immediate follows the ModRM bytes (disp hint 4). */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
            if (IEM_IS_MODRM_REG_MODE(bRm))
            {
                /* register operand; imm32 sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();          /* parameter count for the threaded function for this block. */

                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);  /* sign-extend imm32 here instead. */
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
            }
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3012
3013
3014/**
3015 * @opcode 0x6a
3016 */
3017FNIEMOP_DEF(iemOp_push_Ib)
3018{
3019 IEMOP_MNEMONIC(push_Ib, "push Ib");
3020 IEMOP_HLP_MIN_186();
3021 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3022 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3023
3024 switch (pVCpu->iem.s.enmEffOpSize)
3025 {
3026 case IEMMODE_16BIT:
3027 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
3028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3029 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3030 IEM_MC_PUSH_U16(uValue);
3031 IEM_MC_ADVANCE_RIP_AND_FINISH();
3032 IEM_MC_END();
3033 break;
3034 case IEMMODE_32BIT:
3035 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3036 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3037 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3038 IEM_MC_PUSH_U32(uValue);
3039 IEM_MC_ADVANCE_RIP_AND_FINISH();
3040 IEM_MC_END();
3041 break;
3042 case IEMMODE_64BIT:
3043 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3045 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3046 IEM_MC_PUSH_U64(uValue);
3047 IEM_MC_ADVANCE_RIP_AND_FINISH();
3048 IEM_MC_END();
3049 break;
3050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3051 }
3052}
3053
3054
3055/**
3056 * @opcode 0x6b
3057 * @opflclass multiply
3058 */
3059FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3060{
3061 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3062 IEMOP_HLP_MIN_186();
3063 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3064 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3065
3066 switch (pVCpu->iem.s.enmEffOpSize)
3067 {
3068 case IEMMODE_16BIT:
3069 {
3070 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3071 if (IEM_IS_MODRM_REG_MODE(bRm))
3072 {
3073 /* register operand */
3074 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3075 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3077
3078 IEM_MC_LOCAL(uint16_t, u16Tmp);
3079 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3080
3081 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3082 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3083 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3084 IEM_MC_REF_EFLAGS(pEFlags);
3085 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3086 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3087
3088 IEM_MC_ADVANCE_RIP_AND_FINISH();
3089 IEM_MC_END();
3090 }
3091 else
3092 {
3093 /* memory operand */
3094 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3095
3096 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3097 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3098
3099 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3101
3102 IEM_MC_LOCAL(uint16_t, u16Tmp);
3103 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3104
3105 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3106 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3107 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3108 IEM_MC_REF_EFLAGS(pEFlags);
3109 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3110 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3111
3112 IEM_MC_ADVANCE_RIP_AND_FINISH();
3113 IEM_MC_END();
3114 }
3115 break;
3116 }
3117
3118 case IEMMODE_32BIT:
3119 {
3120 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3121 if (IEM_IS_MODRM_REG_MODE(bRm))
3122 {
3123 /* register operand */
3124 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3125 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3127 IEM_MC_LOCAL(uint32_t, u32Tmp);
3128 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3129
3130 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3131 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3132 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3133 IEM_MC_REF_EFLAGS(pEFlags);
3134 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3135 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3136
3137 IEM_MC_ADVANCE_RIP_AND_FINISH();
3138 IEM_MC_END();
3139 }
3140 else
3141 {
3142 /* memory operand */
3143 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3144 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3145 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3146
3147 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3149
3150 IEM_MC_LOCAL(uint32_t, u32Tmp);
3151 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3152
3153 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3154 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3155 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3156 IEM_MC_REF_EFLAGS(pEFlags);
3157 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3158 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3159
3160 IEM_MC_ADVANCE_RIP_AND_FINISH();
3161 IEM_MC_END();
3162 }
3163 break;
3164 }
3165
3166 case IEMMODE_64BIT:
3167 {
3168 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3169 if (IEM_IS_MODRM_REG_MODE(bRm))
3170 {
3171 /* register operand */
3172 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3173 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3175 IEM_MC_LOCAL(uint64_t, u64Tmp);
3176 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3177
3178 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3179 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3180 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3181 IEM_MC_REF_EFLAGS(pEFlags);
3182 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3183 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3184
3185 IEM_MC_ADVANCE_RIP_AND_FINISH();
3186 IEM_MC_END();
3187 }
3188 else
3189 {
3190 /* memory operand */
3191 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3192 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3193 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3194
3195 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3196 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3197
3198 IEM_MC_LOCAL(uint64_t, u64Tmp);
3199 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3200
3201 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3202 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3203 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3204 IEM_MC_REF_EFLAGS(pEFlags);
3205 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3206 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3207
3208 IEM_MC_ADVANCE_RIP_AND_FINISH();
3209 IEM_MC_END();
3210 }
3211 break;
3212 }
3213
3214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3215 }
3216}
3217
3218
3219/**
3220 * @opcode 0x6c
3221 * @opfltest iopl,df
3222 */
3223FNIEMOP_DEF(iemOp_insb_Yb_DX)
3224{
3225 IEMOP_HLP_MIN_186();
3226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3227 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3228 {
3229 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3230 switch (pVCpu->iem.s.enmEffAddrMode)
3231 {
3232 case IEMMODE_16BIT:
3233 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3234 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3235 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3236 iemCImpl_rep_ins_op8_addr16, false);
3237 case IEMMODE_32BIT:
3238 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3239 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3240 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3241 iemCImpl_rep_ins_op8_addr32, false);
3242 case IEMMODE_64BIT:
3243 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3244 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3245 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3246 iemCImpl_rep_ins_op8_addr64, false);
3247 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3248 }
3249 }
3250 else
3251 {
3252 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3253 switch (pVCpu->iem.s.enmEffAddrMode)
3254 {
3255 case IEMMODE_16BIT:
3256 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3257 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3258 iemCImpl_ins_op8_addr16, false);
3259 case IEMMODE_32BIT:
3260 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3261 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3262 iemCImpl_ins_op8_addr32, false);
3263 case IEMMODE_64BIT:
3264 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3265 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3266 iemCImpl_ins_op8_addr64, false);
3267 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3268 }
3269 }
3270}
3271
3272
3273/**
3274 * @opcode 0x6d
3275 * @opfltest iopl,df
3276 */
3277FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3278{
3279 IEMOP_HLP_MIN_186();
3280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3281 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3282 {
3283 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3284 switch (pVCpu->iem.s.enmEffOpSize)
3285 {
3286 case IEMMODE_16BIT:
3287 switch (pVCpu->iem.s.enmEffAddrMode)
3288 {
3289 case IEMMODE_16BIT:
3290 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3291 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3292 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3293 iemCImpl_rep_ins_op16_addr16, false);
3294 case IEMMODE_32BIT:
3295 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3296 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3297 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3298 iemCImpl_rep_ins_op16_addr32, false);
3299 case IEMMODE_64BIT:
3300 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3301 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3302 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3303 iemCImpl_rep_ins_op16_addr64, false);
3304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3305 }
3306 break;
3307 case IEMMODE_64BIT:
3308 case IEMMODE_32BIT:
3309 switch (pVCpu->iem.s.enmEffAddrMode)
3310 {
3311 case IEMMODE_16BIT:
3312 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3313 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3314 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3315 iemCImpl_rep_ins_op32_addr16, false);
3316 case IEMMODE_32BIT:
3317 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3318 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3319 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3320 iemCImpl_rep_ins_op32_addr32, false);
3321 case IEMMODE_64BIT:
3322 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3323 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3324 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3325 iemCImpl_rep_ins_op32_addr64, false);
3326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3327 }
3328 break;
3329 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3330 }
3331 }
3332 else
3333 {
3334 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3335 switch (pVCpu->iem.s.enmEffOpSize)
3336 {
3337 case IEMMODE_16BIT:
3338 switch (pVCpu->iem.s.enmEffAddrMode)
3339 {
3340 case IEMMODE_16BIT:
3341 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3342 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3343 iemCImpl_ins_op16_addr16, false);
3344 case IEMMODE_32BIT:
3345 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3346 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3347 iemCImpl_ins_op16_addr32, false);
3348 case IEMMODE_64BIT:
3349 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3350 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3351 iemCImpl_ins_op16_addr64, false);
3352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3353 }
3354 break;
3355 case IEMMODE_64BIT:
3356 case IEMMODE_32BIT:
3357 switch (pVCpu->iem.s.enmEffAddrMode)
3358 {
3359 case IEMMODE_16BIT:
3360 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3361 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3362 iemCImpl_ins_op32_addr16, false);
3363 case IEMMODE_32BIT:
3364 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3365 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3366 iemCImpl_ins_op32_addr32, false);
3367 case IEMMODE_64BIT:
3368 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3369 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3370 iemCImpl_ins_op32_addr64, false);
3371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3372 }
3373 break;
3374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3375 }
3376 }
3377}
3378
3379
3380/**
3381 * @opcode 0x6e
3382 * @opfltest iopl,df
3383 */
3384FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3385{
3386 IEMOP_HLP_MIN_186();
3387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3388 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3389 {
3390 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3391 switch (pVCpu->iem.s.enmEffAddrMode)
3392 {
3393 case IEMMODE_16BIT:
3394 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3395 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3396 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3397 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3398 case IEMMODE_32BIT:
3399 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3400 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3401 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3402 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3403 case IEMMODE_64BIT:
3404 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3405 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3406 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3407 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3409 }
3410 }
3411 else
3412 {
3413 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3414 switch (pVCpu->iem.s.enmEffAddrMode)
3415 {
3416 case IEMMODE_16BIT:
3417 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3418 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3419 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3420 case IEMMODE_32BIT:
3421 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3422 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3423 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3424 case IEMMODE_64BIT:
3425 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3426 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3427 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3429 }
3430 }
3431}
3432
3433
3434/**
3435 * @opcode 0x6f
3436 * @opfltest iopl,df
3437 */
3438FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3439{
3440 IEMOP_HLP_MIN_186();
3441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3442 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3443 {
3444 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3445 switch (pVCpu->iem.s.enmEffOpSize)
3446 {
3447 case IEMMODE_16BIT:
3448 switch (pVCpu->iem.s.enmEffAddrMode)
3449 {
3450 case IEMMODE_16BIT:
3451 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3452 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3453 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3454 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3455 case IEMMODE_32BIT:
3456 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3457 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3458 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3459 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3460 case IEMMODE_64BIT:
3461 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3462 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3463 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3464 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3466 }
3467 break;
3468 case IEMMODE_64BIT:
3469 case IEMMODE_32BIT:
3470 switch (pVCpu->iem.s.enmEffAddrMode)
3471 {
3472 case IEMMODE_16BIT:
3473 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3474 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3475 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3476 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3477 case IEMMODE_32BIT:
3478 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3479 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3480 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3481 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3482 case IEMMODE_64BIT:
3483 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3484 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3485 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3486 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3487 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3488 }
3489 break;
3490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3491 }
3492 }
3493 else
3494 {
3495 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3496 switch (pVCpu->iem.s.enmEffOpSize)
3497 {
3498 case IEMMODE_16BIT:
3499 switch (pVCpu->iem.s.enmEffAddrMode)
3500 {
3501 case IEMMODE_16BIT:
3502 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3503 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3504 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3505 case IEMMODE_32BIT:
3506 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3507 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3508 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3509 case IEMMODE_64BIT:
3510 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3511 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3512 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3514 }
3515 break;
3516 case IEMMODE_64BIT:
3517 case IEMMODE_32BIT:
3518 switch (pVCpu->iem.s.enmEffAddrMode)
3519 {
3520 case IEMMODE_16BIT:
3521 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3522 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3523 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3524 case IEMMODE_32BIT:
3525 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3526 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3527 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3528 case IEMMODE_64BIT:
3529 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3530 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3531 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3533 }
3534 break;
3535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3536 }
3537 }
3538}
3539
3540
3541/**
3542 * @opcode 0x70
3543 * @opfltest of
3544 */
3545FNIEMOP_DEF(iemOp_jo_Jb)
3546{
3547 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3548 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3549 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3550
3551 IEM_MC_BEGIN(0, 0, 0, 0);
3552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3553 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3554 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3555 } IEM_MC_ELSE() {
3556 IEM_MC_ADVANCE_RIP_AND_FINISH();
3557 } IEM_MC_ENDIF();
3558 IEM_MC_END();
3559}
3560
3561
3562/**
3563 * @opcode 0x71
3564 * @opfltest of
3565 */
3566FNIEMOP_DEF(iemOp_jno_Jb)
3567{
3568 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3569 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3570 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3571
3572 IEM_MC_BEGIN(0, 0, 0, 0);
3573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3574 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3575 IEM_MC_ADVANCE_RIP_AND_FINISH();
3576 } IEM_MC_ELSE() {
3577 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3578 } IEM_MC_ENDIF();
3579 IEM_MC_END();
3580}
3581
3582/**
3583 * @opcode 0x72
3584 * @opfltest cf
3585 */
3586FNIEMOP_DEF(iemOp_jc_Jb)
3587{
3588 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3589 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3590 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3591
3592 IEM_MC_BEGIN(0, 0, 0, 0);
3593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3594 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3595 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3596 } IEM_MC_ELSE() {
3597 IEM_MC_ADVANCE_RIP_AND_FINISH();
3598 } IEM_MC_ENDIF();
3599 IEM_MC_END();
3600}
3601
3602
3603/**
3604 * @opcode 0x73
3605 * @opfltest cf
3606 */
3607FNIEMOP_DEF(iemOp_jnc_Jb)
3608{
3609 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3610 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3611 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3612
3613 IEM_MC_BEGIN(0, 0, 0, 0);
3614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3616 IEM_MC_ADVANCE_RIP_AND_FINISH();
3617 } IEM_MC_ELSE() {
3618 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3619 } IEM_MC_ENDIF();
3620 IEM_MC_END();
3621}
3622
3623
3624/**
3625 * @opcode 0x74
3626 * @opfltest zf
3627 */
3628FNIEMOP_DEF(iemOp_je_Jb)
3629{
3630 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3631 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3632 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3633
3634 IEM_MC_BEGIN(0, 0, 0, 0);
3635 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3636 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3637 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3638 } IEM_MC_ELSE() {
3639 IEM_MC_ADVANCE_RIP_AND_FINISH();
3640 } IEM_MC_ENDIF();
3641 IEM_MC_END();
3642}
3643
3644
3645/**
3646 * @opcode 0x75
3647 * @opfltest zf
3648 */
3649FNIEMOP_DEF(iemOp_jne_Jb)
3650{
3651 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3652 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3653 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3654
3655 IEM_MC_BEGIN(0, 0, 0, 0);
3656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3657 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 } IEM_MC_ELSE() {
3660 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3661 } IEM_MC_ENDIF();
3662 IEM_MC_END();
3663}
3664
3665
3666/**
3667 * @opcode 0x76
3668 * @opfltest cf,zf
3669 */
3670FNIEMOP_DEF(iemOp_jbe_Jb)
3671{
3672 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3673 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3674 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3675
3676 IEM_MC_BEGIN(0, 0, 0, 0);
3677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3678 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3679 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3680 } IEM_MC_ELSE() {
3681 IEM_MC_ADVANCE_RIP_AND_FINISH();
3682 } IEM_MC_ENDIF();
3683 IEM_MC_END();
3684}
3685
3686
3687/**
3688 * @opcode 0x77
3689 * @opfltest cf,zf
3690 */
3691FNIEMOP_DEF(iemOp_jnbe_Jb)
3692{
3693 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3694 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3695 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3696
3697 IEM_MC_BEGIN(0, 0, 0, 0);
3698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3699 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3700 IEM_MC_ADVANCE_RIP_AND_FINISH();
3701 } IEM_MC_ELSE() {
3702 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3703 } IEM_MC_ENDIF();
3704 IEM_MC_END();
3705}
3706
3707
3708/**
3709 * @opcode 0x78
3710 * @opfltest sf
3711 */
3712FNIEMOP_DEF(iemOp_js_Jb)
3713{
3714 IEMOP_MNEMONIC(js_Jb, "js Jb");
3715 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3716 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3717
3718 IEM_MC_BEGIN(0, 0, 0, 0);
3719 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3720 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3721 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3722 } IEM_MC_ELSE() {
3723 IEM_MC_ADVANCE_RIP_AND_FINISH();
3724 } IEM_MC_ENDIF();
3725 IEM_MC_END();
3726}
3727
3728
3729/**
3730 * @opcode 0x79
3731 * @opfltest sf
3732 */
3733FNIEMOP_DEF(iemOp_jns_Jb)
3734{
3735 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3736 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3737 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3738
3739 IEM_MC_BEGIN(0, 0, 0, 0);
3740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3741 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3742 IEM_MC_ADVANCE_RIP_AND_FINISH();
3743 } IEM_MC_ELSE() {
3744 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3745 } IEM_MC_ENDIF();
3746 IEM_MC_END();
3747}
3748
3749
3750/**
3751 * @opcode 0x7a
3752 * @opfltest pf
3753 */
3754FNIEMOP_DEF(iemOp_jp_Jb)
3755{
3756 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3757 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3758 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3759
3760 IEM_MC_BEGIN(0, 0, 0, 0);
3761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3762 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3763 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3764 } IEM_MC_ELSE() {
3765 IEM_MC_ADVANCE_RIP_AND_FINISH();
3766 } IEM_MC_ENDIF();
3767 IEM_MC_END();
3768}
3769
3770
3771/**
3772 * @opcode 0x7b
3773 * @opfltest pf
3774 */
3775FNIEMOP_DEF(iemOp_jnp_Jb)
3776{
3777 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3778 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3779 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3780
3781 IEM_MC_BEGIN(0, 0, 0, 0);
3782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3783 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3784 IEM_MC_ADVANCE_RIP_AND_FINISH();
3785 } IEM_MC_ELSE() {
3786 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3787 } IEM_MC_ENDIF();
3788 IEM_MC_END();
3789}
3790
3791
3792/**
3793 * @opcode 0x7c
3794 * @opfltest sf,of
3795 */
3796FNIEMOP_DEF(iemOp_jl_Jb)
3797{
3798 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3799 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3800 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3801
3802 IEM_MC_BEGIN(0, 0, 0, 0);
3803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3804 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3805 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3806 } IEM_MC_ELSE() {
3807 IEM_MC_ADVANCE_RIP_AND_FINISH();
3808 } IEM_MC_ENDIF();
3809 IEM_MC_END();
3810}
3811
3812
3813/**
3814 * @opcode 0x7d
3815 * @opfltest sf,of
3816 */
3817FNIEMOP_DEF(iemOp_jnl_Jb)
3818{
3819 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3820 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3821 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3822
3823 IEM_MC_BEGIN(0, 0, 0, 0);
3824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3825 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3826 IEM_MC_ADVANCE_RIP_AND_FINISH();
3827 } IEM_MC_ELSE() {
3828 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3829 } IEM_MC_ENDIF();
3830 IEM_MC_END();
3831}
3832
3833
3834/**
3835 * @opcode 0x7e
3836 * @opfltest zf,sf,of
3837 */
3838FNIEMOP_DEF(iemOp_jle_Jb)
3839{
3840 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3841 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3842 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3843
3844 IEM_MC_BEGIN(0, 0, 0, 0);
3845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3846 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3847 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3848 } IEM_MC_ELSE() {
3849 IEM_MC_ADVANCE_RIP_AND_FINISH();
3850 } IEM_MC_ENDIF();
3851 IEM_MC_END();
3852}
3853
3854
3855/**
3856 * @opcode 0x7f
3857 * @opfltest zf,sf,of
3858 */
3859FNIEMOP_DEF(iemOp_jnle_Jb)
3860{
3861 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3862 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3863 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3864
3865 IEM_MC_BEGIN(0, 0, 0, 0);
3866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3867 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3868 IEM_MC_ADVANCE_RIP_AND_FINISH();
3869 } IEM_MC_ELSE() {
3870 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3871 } IEM_MC_ENDIF();
3872 IEM_MC_END();
3873}
3874
3875
3876/**
3877 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3878 * iemOp_Grp1_Eb_Ib_80.
3879 */
3880#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3881 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3882 { \
3883 /* register target */ \
3884 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3885 IEM_MC_BEGIN(3, 0, 0, 0); \
3886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3887 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3888 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3889 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3890 \
3891 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3892 IEM_MC_REF_EFLAGS(pEFlags); \
3893 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3894 \
3895 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3896 IEM_MC_END(); \
3897 } \
3898 else \
3899 { \
3900 /* memory target */ \
3901 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
3902 { \
3903 IEM_MC_BEGIN(3, 3, 0, 0); \
3904 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3905 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3907 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3908 \
3909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3910 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3911 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3912 IEMOP_HLP_DONE_DECODING(); \
3913 \
3914 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3915 IEM_MC_FETCH_EFLAGS(EFlags); \
3916 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3917 \
3918 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
3919 IEM_MC_COMMIT_EFLAGS(EFlags); \
3920 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3921 IEM_MC_END(); \
3922 } \
3923 else \
3924 { \
3925 (void)0
3926
/**
 * Locked-memory tail for IEMOP_BODY_BINARY_Eb_Ib_RW: supplies the atomic
 * memory-target path (invoked when a LOCK prefix is present) using the
 * locked assembly worker and an atomic map/commit, and closes the braces
 * left open by the _RW macro.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
3950
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for instructions that do
 * not write the destination (the memory operand is mapped read-only and the
 * worker only updates EFLAGS, e.g. CMP-style operations).  Like the _RW
 * macro it ends with an open 'else' branch which the caller must close with
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
3997
/**
 * Closes IEMOP_BODY_BINARY_Eb_Ib_RO's open else-branch by raising \#UD for
 * the LOCK prefix (the RO forms never allow LOCK).
 */
3998#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
3999 IEMOP_HLP_DONE_DECODING(); \
4000 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4001 } \
4002 } \
4003 (void)0
4004
4005
4006
4007/**
4008 * @opmaps grp1_80,grp1_83
4009 * @opcode /0
4010 * @opflclass arithmetic
 *
 * ADD Eb,Ib: byte add of an immediate into r/m8; the LOCKED tail handles a
 * LOCK prefix on the memory form.
4011 */
4012FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4013{
4014 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4015 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4016 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4017}
4018
4019
4020/**
4021 * @opmaps grp1_80,grp1_83
4022 * @opcode /1
4023 * @opflclass logical
 *
 * OR Eb,Ib: byte bitwise-or with an immediate; LOCK supported on memory via
 * the LOCKED tail.
4024 */
4025FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4026{
4027 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4028 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4029 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4030}
4031
4032
4033/**
4034 * @opmaps grp1_80,grp1_83
4035 * @opcode /2
4036 * @opflclass arithmetic_carry
 *
 * ADC Eb,Ib: byte add-with-carry of an immediate; LOCK supported on memory
 * via the LOCKED tail.
4037 */
4038FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4039{
4040 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4041 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4042 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4043}
4044
4045
4046/**
4047 * @opmaps grp1_80,grp1_83
4048 * @opcode /3
4049 * @opflclass arithmetic_carry
 *
 * SBB Eb,Ib: byte subtract-with-borrow of an immediate; LOCK supported on
 * memory via the LOCKED tail.
4050 */
4051FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4052{
4053 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4054 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4055 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4056}
4057
4058
4059/**
4060 * @opmaps grp1_80,grp1_83
4061 * @opcode /4
4062 * @opflclass logical
 *
 * AND Eb,Ib: byte bitwise-and with an immediate; LOCK supported on memory
 * via the LOCKED tail.
4063 */
4064FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4065{
4066 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4067 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4068 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4069}
4070
4071
4072/**
4073 * @opmaps grp1_80,grp1_83
4074 * @opcode /5
4075 * @opflclass arithmetic
 *
 * SUB Eb,Ib: byte subtract of an immediate; LOCK supported on memory via
 * the LOCKED tail.
4076 */
4077FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4078{
4079 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4080 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4081 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4082}
4083
4084
4085/**
4086 * @opmaps grp1_80,grp1_83
4087 * @opcode /6
4088 * @opflclass logical
 *
 * XOR Eb,Ib: byte bitwise-xor with an immediate; LOCK supported on memory
 * via the LOCKED tail.
4089 */
4090FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4091{
4092 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4093 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4094 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4095}
4096
4097
4098/**
4099 * @opmaps grp1_80,grp1_83
4100 * @opcode /7
4101 * @opflclass arithmetic
 *
 * CMP Eb,Ib: compare only, destination is never written, so the read-only
 * body is used and a LOCK prefix raises \#UD via the NO_LOCK tail.
4102 */
4103FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4104{
4105 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4106 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4107 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4108}
4109
4110
4111/**
4112 * @opcode 0x80
 *
 * Group 1 Eb,Ib: fetches the ModR/M byte and dispatches on its reg field
 * (/0../7) to the add/or/adc/sbb/and/sub/xor/cmp handlers above.
4113 */
4114FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4115{
4116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4117 switch (IEM_GET_MODRM_REG_8(bRm))
4118 {
4119 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4120 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4121 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4122 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4123 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4124 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4125 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4126 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4127 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4128 }
4129}
4130
4131
4132/**
4133 * Body for a group 1 binary operator.
 *
 * Read-modify-write Ev,Iz form: covers the register target and the unlocked
 * memory target for 16/32/64-bit operand sizes.  The 64-bit form takes a
 * sign-extended 32-bit immediate (IEM_OPCODE_GET_NEXT_S32_SX_U64), and the
 * 32-bit register form clears the high half of the destination GPR.  Leaves
 * an open else-branch that must be closed by IEMOP_BODY_BINARY_Ev_Iz_LOCKED.
4134 */
4135#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4136 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4137 { \
4138 /* register target */ \
4139 switch (pVCpu->iem.s.enmEffOpSize) \
4140 { \
4141 case IEMMODE_16BIT: \
4142 { \
4143 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4144 IEM_MC_BEGIN(3, 0, 0, 0); \
4145 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4146 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4147 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4148 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4149 \
4150 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4151 IEM_MC_REF_EFLAGS(pEFlags); \
4152 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4153 \
4154 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4155 IEM_MC_END(); \
4156 break; \
4157 } \
4158 \
4159 case IEMMODE_32BIT: \
4160 { \
4161 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4162 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4164 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4165 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4166 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4167 \
4168 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4169 IEM_MC_REF_EFLAGS(pEFlags); \
4170 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4171 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4172 \
4173 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4174 IEM_MC_END(); \
4175 break; \
4176 } \
4177 \
4178 case IEMMODE_64BIT: \
4179 { \
4180 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4181 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4183 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4184 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4185 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4186 \
4187 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4188 IEM_MC_REF_EFLAGS(pEFlags); \
4189 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4190 \
4191 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4192 IEM_MC_END(); \
4193 break; \
4194 } \
4195 \
4196 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4197 } \
4198 } \
4199 else \
4200 { \
4201 /* memory target */ \
4202 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4203 { \
4204 switch (pVCpu->iem.s.enmEffOpSize) \
4205 { \
4206 case IEMMODE_16BIT: \
4207 { \
4208 IEM_MC_BEGIN(3, 3, 0, 0); \
4209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4211 \
4212 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4213 IEMOP_HLP_DONE_DECODING(); \
4214 \
4215 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4216 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4217 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4218 \
4219 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4220 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4221 IEM_MC_FETCH_EFLAGS(EFlags); \
4222 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4223 \
4224 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4225 IEM_MC_COMMIT_EFLAGS(EFlags); \
4226 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4227 IEM_MC_END(); \
4228 break; \
4229 } \
4230 \
4231 case IEMMODE_32BIT: \
4232 { \
4233 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4235 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4236 \
4237 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4238 IEMOP_HLP_DONE_DECODING(); \
4239 \
4240 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4241 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4242 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4243 \
4244 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4245 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4246 IEM_MC_FETCH_EFLAGS(EFlags); \
4247 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4248 \
4249 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4250 IEM_MC_COMMIT_EFLAGS(EFlags); \
4251 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4252 IEM_MC_END(); \
4253 break; \
4254 } \
4255 \
4256 case IEMMODE_64BIT: \
4257 { \
4258 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4259 \
4260 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4261 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4262 \
4263 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4264 IEMOP_HLP_DONE_DECODING(); \
4265 \
4266 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4267 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4268 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4269 \
4270 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4271 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4272 IEM_MC_FETCH_EFLAGS(EFlags); \
4273 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4274 \
4275 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4276 IEM_MC_COMMIT_EFLAGS(EFlags); \
4277 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4278 IEM_MC_END(); \
4279 break; \
4280 } \
4281 \
4282 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4283 } \
4284 } \
4285 else \
4286 { \
4287 (void)0
4288/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/* Locked tail of IEMOP_BODY_BINARY_Ev_Iz_RW: atomic mapping and the locked
   workers for 16/32/64-bit operands; closes the if/else opened by _RW. */
4289#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4290 switch (pVCpu->iem.s.enmEffOpSize) \
4291 { \
4292 case IEMMODE_16BIT: \
4293 { \
4294 IEM_MC_BEGIN(3, 3, 0, 0); \
4295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4297 \
4298 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4299 IEMOP_HLP_DONE_DECODING(); \
4300 \
4301 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4302 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4303 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4304 \
4305 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4306 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4307 IEM_MC_FETCH_EFLAGS(EFlags); \
4308 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4309 \
4310 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4311 IEM_MC_COMMIT_EFLAGS(EFlags); \
4312 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4313 IEM_MC_END(); \
4314 break; \
4315 } \
4316 \
4317 case IEMMODE_32BIT: \
4318 { \
4319 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4320 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4321 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4322 \
4323 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4324 IEMOP_HLP_DONE_DECODING(); \
4325 \
4326 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4327 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4328 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4329 \
4330 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4331 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4332 IEM_MC_FETCH_EFLAGS(EFlags); \
4333 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4334 \
4335 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4336 IEM_MC_COMMIT_EFLAGS(EFlags); \
4337 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4338 IEM_MC_END(); \
4339 break; \
4340 } \
4341 \
4342 case IEMMODE_64BIT: \
4343 { \
4344 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4347 \
4348 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4349 IEMOP_HLP_DONE_DECODING(); \
4350 \
4351 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4352 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4353 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4354 \
4355 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4356 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4357 IEM_MC_FETCH_EFLAGS(EFlags); \
4358 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4359 \
4360 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4361 IEM_MC_COMMIT_EFLAGS(EFlags); \
4362 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4363 IEM_MC_END(); \
4364 break; \
4365 } \
4366 \
4367 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4368 } \
4369 } \
4370 } \
4371 (void)0
4372
4373/* read-only version */
/* Self-contained Ev,Iz body for compare-style operations (destination not
   written): memory is mapped RO and a LOCK prefix raises \#UD in the final
   else-branch, so no _LOCKED/_NO_LOCK tail macro is required. */
4374#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4375 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4376 { \
4377 /* register target */ \
4378 switch (pVCpu->iem.s.enmEffOpSize) \
4379 { \
4380 case IEMMODE_16BIT: \
4381 { \
4382 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4383 IEM_MC_BEGIN(3, 0, 0, 0); \
4384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4385 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4386 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4387 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4388 \
4389 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4390 IEM_MC_REF_EFLAGS(pEFlags); \
4391 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4392 \
4393 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4394 IEM_MC_END(); \
4395 break; \
4396 } \
4397 \
4398 case IEMMODE_32BIT: \
4399 { \
4400 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4401 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4402 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4403 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4404 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4405 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4406 \
4407 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4408 IEM_MC_REF_EFLAGS(pEFlags); \
4409 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4410 \
4411 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4412 IEM_MC_END(); \
4413 break; \
4414 } \
4415 \
4416 case IEMMODE_64BIT: \
4417 { \
4418 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4419 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4421 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4422 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4423 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4424 \
4425 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4426 IEM_MC_REF_EFLAGS(pEFlags); \
4427 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4428 \
4429 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4430 IEM_MC_END(); \
4431 break; \
4432 } \
4433 \
4434 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4435 } \
4436 } \
4437 else \
4438 { \
4439 /* memory target */ \
4440 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4441 { \
4442 switch (pVCpu->iem.s.enmEffOpSize) \
4443 { \
4444 case IEMMODE_16BIT: \
4445 { \
4446 IEM_MC_BEGIN(3, 3, 0, 0); \
4447 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4449 \
4450 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4451 IEMOP_HLP_DONE_DECODING(); \
4452 \
4453 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4454 IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
4455 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4456 \
4457 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4458 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4459 IEM_MC_FETCH_EFLAGS(EFlags); \
4460 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4461 \
4462 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4463 IEM_MC_COMMIT_EFLAGS(EFlags); \
4464 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4465 IEM_MC_END(); \
4466 break; \
4467 } \
4468 \
4469 case IEMMODE_32BIT: \
4470 { \
4471 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4472 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4473 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4474 \
4475 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4476 IEMOP_HLP_DONE_DECODING(); \
4477 \
4478 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4479 IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
4480 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4481 \
4482 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4483 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4484 IEM_MC_FETCH_EFLAGS(EFlags); \
4485 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4486 \
4487 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4488 IEM_MC_COMMIT_EFLAGS(EFlags); \
4489 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4490 IEM_MC_END(); \
4491 break; \
4492 } \
4493 \
4494 case IEMMODE_64BIT: \
4495 { \
4496 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4499 \
4500 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4501 IEMOP_HLP_DONE_DECODING(); \
4502 \
4503 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4504 IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
4505 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4506 \
4507 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4508 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4509 IEM_MC_FETCH_EFLAGS(EFlags); \
4510 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4511 \
4512 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
4513 IEM_MC_COMMIT_EFLAGS(EFlags); \
4514 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4515 IEM_MC_END(); \
4516 break; \
4517 } \
4518 \
4519 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4520 } \
4521 } \
4522 else \
4523 { \
4524 IEMOP_HLP_DONE_DECODING(); \
4525 IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
4526 } \
4527 } \
4528 (void)0
4529
4530
4531/**
4532 * @opmaps grp1_81
4533 * @opcode /0
4534 * @opflclass arithmetic
 *
 * ADD Ev,Iz: word/dword/qword add of an immediate; LOCK supported on memory
 * via the LOCKED tail.
4535 */
4536FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4537{
4538 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4539 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4540 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4541}
4542
4543
4544/**
4545 * @opmaps grp1_81
4546 * @opcode /1
4547 * @opflclass logical
 *
 * OR Ev,Iz: bitwise-or with an immediate; LOCK supported on memory via the
 * LOCKED tail.
4548 */
4549FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4550{
4551 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4552 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4553 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4554}
4555
4556
4557/**
4558 * @opmaps grp1_81
4559 * @opcode /2
4560 * @opflclass arithmetic_carry
 *
 * ADC Ev,Iz: add-with-carry of an immediate; LOCK supported on memory via
 * the LOCKED tail.
4561 */
4562FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4563{
4564 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4565 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4566 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4567}
4568
4569
4570/**
4571 * @opmaps grp1_81
4572 * @opcode /3
4573 * @opflclass arithmetic_carry
 *
 * SBB Ev,Iz: subtract-with-borrow of an immediate; LOCK supported on memory
 * via the LOCKED tail.
4574 */
4575FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4576{
4577 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4578 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4579 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4580}
4581
4582
4583/**
4584 * @opmaps grp1_81
4585 * @opcode /4
4586 * @opflclass logical
 *
 * AND Ev,Iz: bitwise-and with an immediate; LOCK supported on memory via
 * the LOCKED tail.
4587 */
4588FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4589{
4590 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4591 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4592 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4593}
4594
4595
4596/**
4597 * @opmaps grp1_81
4598 * @opcode /5
4599 * @opflclass arithmetic
 *
 * SUB Ev,Iz: subtract of an immediate; LOCK supported on memory via the
 * LOCKED tail.
4600 */
4601FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4602{
4603 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4604 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4605 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4606}
4607
4608
4609/**
4610 * @opmaps grp1_81
4611 * @opcode /6
4612 * @opflclass logical
 *
 * XOR Ev,Iz: bitwise-xor with an immediate; LOCK supported on memory via
 * the LOCKED tail.
4613 */
4614FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4615{
4616 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4617 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4618 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4619}
4620
4621
4622/**
4623 * @opmaps grp1_81
4624 * @opcode /7
4625 * @opflclass arithmetic
 *
 * CMP Ev,Iz: compare only (destination not written), so the self-contained
 * read-only body is used; it rejects the LOCK prefix itself.
4626 */
4627FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4628{
4629 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4630 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4631}
4632
4633
4634/**
4635 * @opcode 0x81
 *
 * Group 1 Ev,Iz: fetches the ModR/M byte and dispatches on its reg field
 * (/0../7) to the add/or/adc/sbb/and/sub/xor/cmp handlers above.
4636 */
4637FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4638{
4639 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4640 switch (IEM_GET_MODRM_REG_8(bRm))
4641 {
4642 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4643 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4644 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4645 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4646 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4647 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4648 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4649 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4651 }
4652}
4653
4654
4655/**
4656 * @opcode 0x82
4657 * @opmnemonic grp1_82
4658 * @opgroup og_groups
 *
 * Opcode 0x82 aliases 0x80 (Grp1 Eb,Ib) but is rejected outside 64-bit-mode
 * checks here: IEMOP_HLP_NO_64BIT makes it invalid in 64-bit mode, then it
 * forwards to the 0x80 handler.
4659 */
4660FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4661{
4662 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4663 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4664}
4665
4666
4667/**
4668 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4669 * iemOp_Grp1_Ev_Ib.
 *
 * Read-modify-write Ev,Ib form (opcode 0x83): the imm8 is sign-extended to
 * the effective operand size before the worker is called.  Covers register
 * and unlocked memory targets; leaves an open else-branch that must be
 * closed by IEMOP_BODY_BINARY_Ev_Ib_LOCKED.
4670 */
4671#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4672 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4673 { \
4674 /* \
4675 * Register target \
4676 */ \
4677 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4678 switch (pVCpu->iem.s.enmEffOpSize) \
4679 { \
4680 case IEMMODE_16BIT: \
4681 IEM_MC_BEGIN(3, 0, 0, 0); \
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4683 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4684 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4685 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4686 \
4687 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4688 IEM_MC_REF_EFLAGS(pEFlags); \
4689 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4690 \
4691 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4692 IEM_MC_END(); \
4693 break; \
4694 \
4695 case IEMMODE_32BIT: \
4696 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4698 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4699 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4700 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4701 \
4702 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4703 IEM_MC_REF_EFLAGS(pEFlags); \
4704 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4705 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4706 \
4707 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4708 IEM_MC_END(); \
4709 break; \
4710 \
4711 case IEMMODE_64BIT: \
4712 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4714 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4715 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4716 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4717 \
4718 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4719 IEM_MC_REF_EFLAGS(pEFlags); \
4720 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4721 \
4722 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4723 IEM_MC_END(); \
4724 break; \
4725 \
4726 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4727 } \
4728 } \
4729 else \
4730 { \
4731 /* \
4732 * Memory target. \
4733 */ \
4734 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4735 { \
4736 switch (pVCpu->iem.s.enmEffOpSize) \
4737 { \
4738 case IEMMODE_16BIT: \
4739 IEM_MC_BEGIN(3, 3, 0, 0); \
4740 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4741 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4742 \
4743 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4744 IEMOP_HLP_DONE_DECODING(); \
4745 \
4746 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4747 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4748 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4749 \
4750 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4751 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4752 IEM_MC_FETCH_EFLAGS(EFlags); \
4753 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4754 \
4755 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4756 IEM_MC_COMMIT_EFLAGS(EFlags); \
4757 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4758 IEM_MC_END(); \
4759 break; \
4760 \
4761 case IEMMODE_32BIT: \
4762 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4765 \
4766 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4767 IEMOP_HLP_DONE_DECODING(); \
4768 \
4769 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4770 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4771 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4772 \
4773 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4774 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4775 IEM_MC_FETCH_EFLAGS(EFlags); \
4776 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4777 \
4778 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4779 IEM_MC_COMMIT_EFLAGS(EFlags); \
4780 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4781 IEM_MC_END(); \
4782 break; \
4783 \
4784 case IEMMODE_64BIT: \
4785 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4788 \
4789 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4790 IEMOP_HLP_DONE_DECODING(); \
4791 \
4792 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4793 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4794 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4795 \
4796 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4797 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4798 IEM_MC_FETCH_EFLAGS(EFlags); \
4799 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4800 \
4801 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4802 IEM_MC_COMMIT_EFLAGS(EFlags); \
4803 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4804 IEM_MC_END(); \
4805 break; \
4806 \
4807 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4808 } \
4809 } \
4810 else \
4811 { \
4812 (void)0
4813/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/* Locked tail of IEMOP_BODY_BINARY_Ev_Ib_RW: atomic mapping and the locked
   workers, with the imm8 sign-extended to the operand size; closes the
   if/else opened by _RW. */
4814#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
4815 switch (pVCpu->iem.s.enmEffOpSize) \
4816 { \
4817 case IEMMODE_16BIT: \
4818 IEM_MC_BEGIN(3, 3, 0, 0); \
4819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4821 \
4822 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4823 IEMOP_HLP_DONE_DECODING(); \
4824 \
4825 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4826 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4827 IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4828 \
4829 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4830 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4831 IEM_MC_FETCH_EFLAGS(EFlags); \
4832 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
4833 \
4834 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4835 IEM_MC_COMMIT_EFLAGS(EFlags); \
4836 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4837 IEM_MC_END(); \
4838 break; \
4839 \
4840 case IEMMODE_32BIT: \
4841 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4842 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4844 \
4845 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4846 IEMOP_HLP_DONE_DECODING(); \
4847 \
4848 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4849 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4850 IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4851 \
4852 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4853 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4854 IEM_MC_FETCH_EFLAGS(EFlags); \
4855 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
4856 \
4857 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4858 IEM_MC_COMMIT_EFLAGS(EFlags); \
4859 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4860 IEM_MC_END(); \
4861 break; \
4862 \
4863 case IEMMODE_64BIT: \
4864 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4865 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4866 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4867 \
4868 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4869 IEMOP_HLP_DONE_DECODING(); \
4870 \
4871 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4872 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4873 IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4874 \
4875 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4876 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4877 IEM_MC_FETCH_EFLAGS(EFlags); \
4878 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
4879 \
4880 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4881 IEM_MC_COMMIT_EFLAGS(EFlags); \
4882 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4883 IEM_MC_END(); \
4884 break; \
4885 \
4886 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4887 } \
4888 } \
4889 } \
4890 (void)0
4891
/*
 * Read-only variant: the destination operand is only read, never written
 * (used by CMP Ev,Ib).  The imm8 is sign-extended to the effective operand
 * size via the (int8_t) casts below.  Register targets fetch the immediate
 * straight away; memory targets calculate the effective address first
 * (the immediate is the last opcode byte) and map the destination
 * read-only.  A LOCK prefix on a memory target raises the invalid-lock-
 * prefix exception; on a register target it is rejected while decoding.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5038
5039/**
5040 * @opmaps grp1_83
5041 * @opcode /0
5042 * @opflclass arithmetic
5043 */
5044FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5045{
5046 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5047 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
5048 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
5049}
5050
5051
5052/**
5053 * @opmaps grp1_83
5054 * @opcode /1
5055 * @opflclass logical
5056 */
5057FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5058{
5059 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5060 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
5061 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
5062}
5063
5064
5065/**
5066 * @opmaps grp1_83
5067 * @opcode /2
5068 * @opflclass arithmetic_carry
5069 */
5070FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5071{
5072 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5073 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5074 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5075}
5076
5077
5078/**
5079 * @opmaps grp1_83
5080 * @opcode /3
5081 * @opflclass arithmetic_carry
5082 */
5083FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5084{
5085 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5086 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5087 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5088}
5089
5090
5091/**
5092 * @opmaps grp1_83
5093 * @opcode /4
5094 * @opflclass logical
5095 */
5096FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5097{
5098 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5099 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5100 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5101}
5102
5103
5104/**
5105 * @opmaps grp1_83
5106 * @opcode /5
5107 * @opflclass arithmetic
5108 */
5109FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5110{
5111 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5112 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5113 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5114}
5115
5116
5117/**
5118 * @opmaps grp1_83
5119 * @opcode /6
5120 * @opflclass logical
5121 */
5122FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5123{
5124 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5125 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5126 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5127}
5128
5129
5130/**
5131 * @opmaps grp1_83
5132 * @opcode /7
5133 * @opflclass arithmetic
5134 */
5135FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5136{
5137 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5138 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5139}
5140
5141
5142/**
5143 * @opcode 0x83
5144 */
5145FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5146{
5147 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5148 to the 386 even if absent in the intel reference manuals and some
5149 3rd party opcode listings. */
5150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5151 switch (IEM_GET_MODRM_REG_8(bRm))
5152 {
5153 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5154 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5155 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5156 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5157 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5158 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5159 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5160 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5162 }
5163}
5164
5165
5166/**
5167 * @opcode 0x84
5168 * @opflclass logical
5169 */
5170FNIEMOP_DEF(iemOp_test_Eb_Gb)
5171{
5172 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5173 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5174 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5175}
5176
5177
5178/**
5179 * @opcode 0x85
5180 * @opflclass logical
5181 */
5182FNIEMOP_DEF(iemOp_test_Ev_Gv)
5183{
5184 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5185 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5186 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5187}
5188
5189
5190/**
5191 * @opcode 0x86
5192 */
5193FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5194{
5195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5196 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5197
5198 /*
5199 * If rm is denoting a register, no more instruction bytes.
5200 */
5201 if (IEM_IS_MODRM_REG_MODE(bRm))
5202 {
5203 IEM_MC_BEGIN(0, 2, 0, 0);
5204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5205 IEM_MC_LOCAL(uint8_t, uTmp1);
5206 IEM_MC_LOCAL(uint8_t, uTmp2);
5207
5208 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5209 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5210 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5211 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5212
5213 IEM_MC_ADVANCE_RIP_AND_FINISH();
5214 IEM_MC_END();
5215 }
5216 else
5217 {
5218 /*
5219 * We're accessing memory.
5220 */
5221#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5222 IEM_MC_BEGIN(2, 4, 0, 0); \
5223 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5224 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5225 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5226 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5227 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5228 \
5229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5230 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5231 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5232 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5233 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5234 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5235 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5236 \
5237 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5238 IEM_MC_END()
5239
5240 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5241 {
5242 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5243 }
5244 else
5245 {
5246 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5247 }
5248 }
5249}
5250
5251
5252/**
5253 * @opcode 0x87
5254 */
5255FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5256{
5257 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5258 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5259
5260 /*
5261 * If rm is denoting a register, no more instruction bytes.
5262 */
5263 if (IEM_IS_MODRM_REG_MODE(bRm))
5264 {
5265 switch (pVCpu->iem.s.enmEffOpSize)
5266 {
5267 case IEMMODE_16BIT:
5268 IEM_MC_BEGIN(0, 2, 0, 0);
5269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5270 IEM_MC_LOCAL(uint16_t, uTmp1);
5271 IEM_MC_LOCAL(uint16_t, uTmp2);
5272
5273 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5274 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5275 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5276 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5277
5278 IEM_MC_ADVANCE_RIP_AND_FINISH();
5279 IEM_MC_END();
5280 break;
5281
5282 case IEMMODE_32BIT:
5283 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5284 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5285 IEM_MC_LOCAL(uint32_t, uTmp1);
5286 IEM_MC_LOCAL(uint32_t, uTmp2);
5287
5288 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5289 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5290 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5291 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5292
5293 IEM_MC_ADVANCE_RIP_AND_FINISH();
5294 IEM_MC_END();
5295 break;
5296
5297 case IEMMODE_64BIT:
5298 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5299 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5300 IEM_MC_LOCAL(uint64_t, uTmp1);
5301 IEM_MC_LOCAL(uint64_t, uTmp2);
5302
5303 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5304 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5305 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5306 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5307
5308 IEM_MC_ADVANCE_RIP_AND_FINISH();
5309 IEM_MC_END();
5310 break;
5311
5312 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5313 }
5314 }
5315 else
5316 {
5317 /*
5318 * We're accessing memory.
5319 */
5320#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
5321 do { \
5322 switch (pVCpu->iem.s.enmEffOpSize) \
5323 { \
5324 case IEMMODE_16BIT: \
5325 IEM_MC_BEGIN(2, 4, 0, 0); \
5326 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5327 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5328 IEM_MC_LOCAL(uint16_t, uTmpReg); \
5329 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
5330 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
5331 \
5332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5333 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5334 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5335 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5336 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
5337 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5338 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5339 \
5340 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5341 IEM_MC_END(); \
5342 break; \
5343 \
5344 case IEMMODE_32BIT: \
5345 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
5346 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5347 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5348 IEM_MC_LOCAL(uint32_t, uTmpReg); \
5349 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
5350 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
5351 \
5352 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5353 IEMOP_HLP_DONE_DECODING(); \
5354 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5355 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5356 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
5357 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5358 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5359 \
5360 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5361 IEM_MC_END(); \
5362 break; \
5363 \
5364 case IEMMODE_64BIT: \
5365 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
5366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5367 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5368 IEM_MC_LOCAL(uint64_t, uTmpReg); \
5369 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
5370 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
5371 \
5372 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5373 IEMOP_HLP_DONE_DECODING(); \
5374 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5375 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5376 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
5377 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5378 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5379 \
5380 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5381 IEM_MC_END(); \
5382 break; \
5383 \
5384 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5385 } \
5386 } while (0)
5387 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5388 {
5389 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
5390 }
5391 else
5392 {
5393 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
5394 }
5395 }
5396}
5397
5398
5399/**
5400 * @opcode 0x88
5401 */
5402FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5403{
5404 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5405
5406 uint8_t bRm;
5407 IEM_OPCODE_GET_NEXT_U8(&bRm);
5408
5409 /*
5410 * If rm is denoting a register, no more instruction bytes.
5411 */
5412 if (IEM_IS_MODRM_REG_MODE(bRm))
5413 {
5414 IEM_MC_BEGIN(0, 1, 0, 0);
5415 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5416 IEM_MC_LOCAL(uint8_t, u8Value);
5417 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5418 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5419 IEM_MC_ADVANCE_RIP_AND_FINISH();
5420 IEM_MC_END();
5421 }
5422 else
5423 {
5424 /*
5425 * We're writing a register to memory.
5426 */
5427 IEM_MC_BEGIN(0, 2, 0, 0);
5428 IEM_MC_LOCAL(uint8_t, u8Value);
5429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5430 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5432 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5433 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5434 IEM_MC_ADVANCE_RIP_AND_FINISH();
5435 IEM_MC_END();
5436 }
5437}
5438
5439
5440/**
5441 * @opcode 0x89
5442 */
5443FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5444{
5445 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5446
5447 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5448
5449 /*
5450 * If rm is denoting a register, no more instruction bytes.
5451 */
5452 if (IEM_IS_MODRM_REG_MODE(bRm))
5453 {
5454 switch (pVCpu->iem.s.enmEffOpSize)
5455 {
5456 case IEMMODE_16BIT:
5457 IEM_MC_BEGIN(0, 1, 0, 0);
5458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5459 IEM_MC_LOCAL(uint16_t, u16Value);
5460 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5461 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5462 IEM_MC_ADVANCE_RIP_AND_FINISH();
5463 IEM_MC_END();
5464 break;
5465
5466 case IEMMODE_32BIT:
5467 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5469 IEM_MC_LOCAL(uint32_t, u32Value);
5470 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5471 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5472 IEM_MC_ADVANCE_RIP_AND_FINISH();
5473 IEM_MC_END();
5474 break;
5475
5476 case IEMMODE_64BIT:
5477 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5479 IEM_MC_LOCAL(uint64_t, u64Value);
5480 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5481 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5482 IEM_MC_ADVANCE_RIP_AND_FINISH();
5483 IEM_MC_END();
5484 break;
5485
5486 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5487 }
5488 }
5489 else
5490 {
5491 /*
5492 * We're writing a register to memory.
5493 */
5494 switch (pVCpu->iem.s.enmEffOpSize)
5495 {
5496 case IEMMODE_16BIT:
5497 IEM_MC_BEGIN(0, 2, 0, 0);
5498 IEM_MC_LOCAL(uint16_t, u16Value);
5499 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5500 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5502 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5503 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5504 IEM_MC_ADVANCE_RIP_AND_FINISH();
5505 IEM_MC_END();
5506 break;
5507
5508 case IEMMODE_32BIT:
5509 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5510 IEM_MC_LOCAL(uint32_t, u32Value);
5511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5514 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5515 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5516 IEM_MC_ADVANCE_RIP_AND_FINISH();
5517 IEM_MC_END();
5518 break;
5519
5520 case IEMMODE_64BIT:
5521 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5522 IEM_MC_LOCAL(uint64_t, u64Value);
5523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5526 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5527 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5528 IEM_MC_ADVANCE_RIP_AND_FINISH();
5529 IEM_MC_END();
5530 break;
5531
5532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5533 }
5534 }
5535}
5536
5537
5538/**
5539 * @opcode 0x8a
5540 */
5541FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5542{
5543 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5544
5545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5546
5547 /*
5548 * If rm is denoting a register, no more instruction bytes.
5549 */
5550 if (IEM_IS_MODRM_REG_MODE(bRm))
5551 {
5552 IEM_MC_BEGIN(0, 1, 0, 0);
5553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5554 IEM_MC_LOCAL(uint8_t, u8Value);
5555 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5556 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5557 IEM_MC_ADVANCE_RIP_AND_FINISH();
5558 IEM_MC_END();
5559 }
5560 else
5561 {
5562 /*
5563 * We're loading a register from memory.
5564 */
5565 IEM_MC_BEGIN(0, 2, 0, 0);
5566 IEM_MC_LOCAL(uint8_t, u8Value);
5567 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5568 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5570 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5571 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5572 IEM_MC_ADVANCE_RIP_AND_FINISH();
5573 IEM_MC_END();
5574 }
5575}
5576
5577
5578/**
5579 * @opcode 0x8b
5580 */
5581FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5582{
5583 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5584
5585 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5586
5587 /*
5588 * If rm is denoting a register, no more instruction bytes.
5589 */
5590 if (IEM_IS_MODRM_REG_MODE(bRm))
5591 {
5592 switch (pVCpu->iem.s.enmEffOpSize)
5593 {
5594 case IEMMODE_16BIT:
5595 IEM_MC_BEGIN(0, 1, 0, 0);
5596 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5597 IEM_MC_LOCAL(uint16_t, u16Value);
5598 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5599 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5600 IEM_MC_ADVANCE_RIP_AND_FINISH();
5601 IEM_MC_END();
5602 break;
5603
5604 case IEMMODE_32BIT:
5605 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5606 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5607 IEM_MC_LOCAL(uint32_t, u32Value);
5608 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5609 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5610 IEM_MC_ADVANCE_RIP_AND_FINISH();
5611 IEM_MC_END();
5612 break;
5613
5614 case IEMMODE_64BIT:
5615 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5617 IEM_MC_LOCAL(uint64_t, u64Value);
5618 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5619 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5620 IEM_MC_ADVANCE_RIP_AND_FINISH();
5621 IEM_MC_END();
5622 break;
5623
5624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5625 }
5626 }
5627 else
5628 {
5629 /*
5630 * We're loading a register from memory.
5631 */
5632 switch (pVCpu->iem.s.enmEffOpSize)
5633 {
5634 case IEMMODE_16BIT:
5635 IEM_MC_BEGIN(0, 2, 0, 0);
5636 IEM_MC_LOCAL(uint16_t, u16Value);
5637 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5638 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5640 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5641 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5642 IEM_MC_ADVANCE_RIP_AND_FINISH();
5643 IEM_MC_END();
5644 break;
5645
5646 case IEMMODE_32BIT:
5647 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5648 IEM_MC_LOCAL(uint32_t, u32Value);
5649 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5650 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5651 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5652 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5653 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5654 IEM_MC_ADVANCE_RIP_AND_FINISH();
5655 IEM_MC_END();
5656 break;
5657
5658 case IEMMODE_64BIT:
5659 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5660 IEM_MC_LOCAL(uint64_t, u64Value);
5661 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5662 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5664 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5665 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5666 IEM_MC_ADVANCE_RIP_AND_FINISH();
5667 IEM_MC_END();
5668 break;
5669
5670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5671 }
5672 }
5673}
5674
5675
5676/**
5677 * opcode 0x63
5678 * @todo Table fixme
5679 */
5680FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5681{
5682 if (!IEM_IS_64BIT_CODE(pVCpu))
5683 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5684 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5685 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5686 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5687}
5688
5689
5690/**
5691 * @opcode 0x8c
5692 */
5693FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5694{
5695 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5696
5697 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5698
5699 /*
5700 * Check that the destination register exists. The REX.R prefix is ignored.
5701 */
5702 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5703 if (iSegReg > X86_SREG_GS)
5704 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5705
5706 /*
5707 * If rm is denoting a register, no more instruction bytes.
5708 * In that case, the operand size is respected and the upper bits are
5709 * cleared (starting with some pentium).
5710 */
5711 if (IEM_IS_MODRM_REG_MODE(bRm))
5712 {
5713 switch (pVCpu->iem.s.enmEffOpSize)
5714 {
5715 case IEMMODE_16BIT:
5716 IEM_MC_BEGIN(0, 1, 0, 0);
5717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5718 IEM_MC_LOCAL(uint16_t, u16Value);
5719 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5720 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5721 IEM_MC_ADVANCE_RIP_AND_FINISH();
5722 IEM_MC_END();
5723 break;
5724
5725 case IEMMODE_32BIT:
5726 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5728 IEM_MC_LOCAL(uint32_t, u32Value);
5729 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5730 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5731 IEM_MC_ADVANCE_RIP_AND_FINISH();
5732 IEM_MC_END();
5733 break;
5734
5735 case IEMMODE_64BIT:
5736 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5738 IEM_MC_LOCAL(uint64_t, u64Value);
5739 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5740 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5741 IEM_MC_ADVANCE_RIP_AND_FINISH();
5742 IEM_MC_END();
5743 break;
5744
5745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5746 }
5747 }
5748 else
5749 {
5750 /*
5751 * We're saving the register to memory. The access is word sized
5752 * regardless of operand size prefixes.
5753 */
5754#if 0 /* not necessary */
5755 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5756#endif
5757 IEM_MC_BEGIN(0, 2, 0, 0);
5758 IEM_MC_LOCAL(uint16_t, u16Value);
5759 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5760 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5762 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5763 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5764 IEM_MC_ADVANCE_RIP_AND_FINISH();
5765 IEM_MC_END();
5766 }
5767}
5768
5769
5770
5771
5772/**
5773 * @opcode 0x8d
5774 */
5775FNIEMOP_DEF(iemOp_lea_Gv_M)
5776{
5777 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5779 if (IEM_IS_MODRM_REG_MODE(bRm))
5780 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5781
5782 switch (pVCpu->iem.s.enmEffOpSize)
5783 {
5784 case IEMMODE_16BIT:
5785 IEM_MC_BEGIN(0, 2, 0, 0);
5786 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5787 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5789 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
5790 * operand-size, which is usually the case. It'll save an instruction
5791 * and a register. */
5792 IEM_MC_LOCAL(uint16_t, u16Cast);
5793 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5794 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5795 IEM_MC_ADVANCE_RIP_AND_FINISH();
5796 IEM_MC_END();
5797 break;
5798
5799 case IEMMODE_32BIT:
5800 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5802 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5804 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
5805 * operand-size, which is usually the case. It'll save an instruction
5806 * and a register. */
5807 IEM_MC_LOCAL(uint32_t, u32Cast);
5808 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5809 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5810 IEM_MC_ADVANCE_RIP_AND_FINISH();
5811 IEM_MC_END();
5812 break;
5813
5814 case IEMMODE_64BIT:
5815 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5817 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5819 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5820 IEM_MC_ADVANCE_RIP_AND_FINISH();
5821 IEM_MC_END();
5822 break;
5823
5824 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5825 }
5826}
5827
5828
5829/**
5830 * @opcode 0x8e
5831 */
5832FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5833{
5834 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5835
5836 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5837
5838 /*
5839 * The practical operand size is 16-bit.
5840 */
5841#if 0 /* not necessary */
5842 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5843#endif
5844
5845 /*
5846 * Check that the destination register exists and can be used with this
5847 * instruction. The REX.R prefix is ignored.
5848 */
5849 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5850 /** @todo r=bird: What does 8086 do here wrt CS? */
5851 if ( iSegReg == X86_SREG_CS
5852 || iSegReg > X86_SREG_GS)
5853 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5854
5855 /*
5856 * If rm is denoting a register, no more instruction bytes.
5857 *
5858 * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
5859 * IEM_CIMPL_F_XXX values depending on the CPU mode and target
5860 * register. This is a restriction of the current recompiler
5861 * approach.
5862 */
5863 if (IEM_IS_MODRM_REG_MODE(bRm))
5864 {
5865#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
5866 IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
5867 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5868 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5869 IEM_MC_ARG(uint16_t, u16Value, 1); \
5870 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5871 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
5872 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
5873 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
5874 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
5875 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
5876 iemCImpl_load_SReg, iSRegArg, u16Value); \
5877 IEM_MC_END()
5878
5879 if (iSegReg == X86_SREG_SS)
5880 {
5881 if (IEM_IS_32BIT_CODE(pVCpu))
5882 {
5883 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5884 }
5885 else
5886 {
5887 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5888 }
5889 }
5890 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5891 {
5892 IEMOP_MOV_SW_EV_REG_BODY(0);
5893 }
5894 else
5895 {
5896 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
5897 }
5898#undef IEMOP_MOV_SW_EV_REG_BODY
5899 }
5900 else
5901 {
5902 /*
5903 * We're loading the register from memory. The access is word sized
5904 * regardless of operand size prefixes.
5905 */
5906#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
5907 IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
5908 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5909 IEM_MC_ARG(uint16_t, u16Value, 1); \
5910 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5911 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5912 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5913 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5914 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
5915 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
5916 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
5917 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
5918 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
5919 iemCImpl_load_SReg, iSRegArg, u16Value); \
5920 IEM_MC_END()
5921
5922 if (iSegReg == X86_SREG_SS)
5923 {
5924 if (IEM_IS_32BIT_CODE(pVCpu))
5925 {
5926 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5927 }
5928 else
5929 {
5930 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5931 }
5932 }
5933 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5934 {
5935 IEMOP_MOV_SW_EV_MEM_BODY(0);
5936 }
5937 else
5938 {
5939 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
5940 }
5941#undef IEMOP_MOV_SW_EV_MEM_BODY
5942 }
5943}
5944
5945
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code with the regular
       'pop Greg' opcodes (0x58..0x5f). */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    /* Note: the 3rd IEM_MC_CALC_RM_EFF_ADDR argument ('N << 8') conveys the
       operand size by which rSP must be considered pre-incremented when rSP
       itself is used in the effective address (see the Intel note above). */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
    /* Disabled interpreter-only fallback: pops via a temporary RSP copy so the
       real RSP is only committed after the memory store has succeeded. */
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR  GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit RSP only now that both the pop and the store succeeded. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6077
6078
6079/**
6080 * @opcode 0x8f
6081 */
6082FNIEMOP_DEF(iemOp_Grp1A__xop)
6083{
6084 /*
6085 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6086 * three byte VEX prefix, except that the mmmmm field cannot have the values
6087 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6088 */
6089 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6090 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6091 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6092
6093 IEMOP_MNEMONIC(xop, "xop");
6094 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6095 {
6096 /** @todo Test when exctly the XOP conformance checks kick in during
6097 * instruction decoding and fetching (using \#PF). */
6098 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6099 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6100 if ( ( pVCpu->iem.s.fPrefixes
6101 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6102 == 0)
6103 {
6104 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6105 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6106 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6107 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6108 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6109 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6110 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6111 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6112 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6113
6114 /** @todo XOP: Just use new tables and decoders. */
6115 switch (bRm & 0x1f)
6116 {
6117 case 8: /* xop opcode map 8. */
6118 IEMOP_BITCH_ABOUT_STUB();
6119 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6120
6121 case 9: /* xop opcode map 9. */
6122 IEMOP_BITCH_ABOUT_STUB();
6123 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6124
6125 case 10: /* xop opcode map 10. */
6126 IEMOP_BITCH_ABOUT_STUB();
6127 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6128
6129 default:
6130 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6131 IEMOP_RAISE_INVALID_OPCODE_RET();
6132 }
6133 }
6134 else
6135 Log(("XOP: Invalid prefix mix!\n"));
6136 }
6137 else
6138 Log(("XOP: XOP support disabled!\n"));
6139 IEMOP_RAISE_INVALID_OPCODE_RET();
6140}
6141
6142
6143/**
6144 * Common 'xchg reg,rAX' helper.
6145 */
6146FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6147{
6148 iReg |= pVCpu->iem.s.uRexB;
6149 switch (pVCpu->iem.s.enmEffOpSize)
6150 {
6151 case IEMMODE_16BIT:
6152 IEM_MC_BEGIN(0, 2, 0, 0);
6153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6154 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6155 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6156 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6157 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6158 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6159 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6160 IEM_MC_ADVANCE_RIP_AND_FINISH();
6161 IEM_MC_END();
6162 break;
6163
6164 case IEMMODE_32BIT:
6165 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6167 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6168 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6169 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6170 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6171 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6172 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6173 IEM_MC_ADVANCE_RIP_AND_FINISH();
6174 IEM_MC_END();
6175 break;
6176
6177 case IEMMODE_64BIT:
6178 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6180 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6181 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6182 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6183 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6184 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6185 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6186 IEM_MC_ADVANCE_RIP_AND_FINISH();
6187 IEM_MC_END();
6188 break;
6189
6190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6191 }
6192}
6193
6194
6195/**
6196 * @opcode 0x90
6197 */
6198FNIEMOP_DEF(iemOp_nop)
6199{
6200 /* R8/R8D and RAX/EAX can be exchanged. */
6201 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6202 {
6203 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6204 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6205 }
6206
6207 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6208 {
6209 IEMOP_MNEMONIC(pause, "pause");
6210 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6211 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6212 if (!IEM_IS_IN_GUEST(pVCpu))
6213 { /* probable */ }
6214#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6215 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6216 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6217#endif
6218#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6219 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6220 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6221#endif
6222 }
6223 else
6224 IEMOP_MNEMONIC(nop, "nop");
6225 /** @todo testcase: lock nop; lock pause */
6226 IEM_MC_BEGIN(0, 0, 0, 0);
6227 IEMOP_HLP_DONE_DECODING();
6228 IEM_MC_ADVANCE_RIP_AND_FINISH();
6229 IEM_MC_END();
6230}
6231
6232
6233/**
6234 * @opcode 0x91
6235 */
6236FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6237{
6238 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6239 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6240}
6241
6242
6243/**
6244 * @opcode 0x92
6245 */
6246FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6247{
6248 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6249 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6250}
6251
6252
6253/**
6254 * @opcode 0x93
6255 */
6256FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6257{
6258 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6259 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6260}
6261
6262
6263/**
6264 * @opcode 0x94
6265 */
6266FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6267{
6268 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6269 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6270}
6271
6272
6273/**
6274 * @opcode 0x95
6275 */
6276FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6277{
6278 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6279 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6280}
6281
6282
6283/**
6284 * @opcode 0x96
6285 */
6286FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6287{
6288 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6289 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6290}
6291
6292
6293/**
6294 * @opcode 0x97
6295 */
6296FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6297{
6298 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6299 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6300}
6301
6302
6303/**
6304 * @opcode 0x98
6305 */
6306FNIEMOP_DEF(iemOp_cbw)
6307{
6308 switch (pVCpu->iem.s.enmEffOpSize)
6309 {
6310 case IEMMODE_16BIT:
6311 IEMOP_MNEMONIC(cbw, "cbw");
6312 IEM_MC_BEGIN(0, 1, 0, 0);
6313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6314 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6315 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6316 } IEM_MC_ELSE() {
6317 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6318 } IEM_MC_ENDIF();
6319 IEM_MC_ADVANCE_RIP_AND_FINISH();
6320 IEM_MC_END();
6321 break;
6322
6323 case IEMMODE_32BIT:
6324 IEMOP_MNEMONIC(cwde, "cwde");
6325 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6327 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6328 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6329 } IEM_MC_ELSE() {
6330 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6331 } IEM_MC_ENDIF();
6332 IEM_MC_ADVANCE_RIP_AND_FINISH();
6333 IEM_MC_END();
6334 break;
6335
6336 case IEMMODE_64BIT:
6337 IEMOP_MNEMONIC(cdqe, "cdqe");
6338 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6339 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6340 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6341 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6342 } IEM_MC_ELSE() {
6343 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6344 } IEM_MC_ENDIF();
6345 IEM_MC_ADVANCE_RIP_AND_FINISH();
6346 IEM_MC_END();
6347 break;
6348
6349 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6350 }
6351}
6352
6353
6354/**
6355 * @opcode 0x99
6356 */
6357FNIEMOP_DEF(iemOp_cwd)
6358{
6359 switch (pVCpu->iem.s.enmEffOpSize)
6360 {
6361 case IEMMODE_16BIT:
6362 IEMOP_MNEMONIC(cwd, "cwd");
6363 IEM_MC_BEGIN(0, 1, 0, 0);
6364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6365 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6366 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6367 } IEM_MC_ELSE() {
6368 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6369 } IEM_MC_ENDIF();
6370 IEM_MC_ADVANCE_RIP_AND_FINISH();
6371 IEM_MC_END();
6372 break;
6373
6374 case IEMMODE_32BIT:
6375 IEMOP_MNEMONIC(cdq, "cdq");
6376 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6378 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6379 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6380 } IEM_MC_ELSE() {
6381 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6382 } IEM_MC_ENDIF();
6383 IEM_MC_ADVANCE_RIP_AND_FINISH();
6384 IEM_MC_END();
6385 break;
6386
6387 case IEMMODE_64BIT:
6388 IEMOP_MNEMONIC(cqo, "cqo");
6389 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6391 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6392 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6393 } IEM_MC_ELSE() {
6394 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6395 } IEM_MC_ENDIF();
6396 IEM_MC_ADVANCE_RIP_AND_FINISH();
6397 IEM_MC_END();
6398 break;
6399
6400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6401 }
6402}
6403
6404
6405/**
6406 * @opcode 0x9a
6407 */
6408FNIEMOP_DEF(iemOp_call_Ap)
6409{
6410 IEMOP_MNEMONIC(call_Ap, "call Ap");
6411 IEMOP_HLP_NO_64BIT();
6412
6413 /* Decode the far pointer address and pass it on to the far call C implementation. */
6414 uint32_t off32Seg;
6415 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6416 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6417 else
6418 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6419 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6421 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
6422 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
6423 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6424 /** @todo make task-switches, ring-switches, ++ return non-zero status */
6425}
6426
6427
/** Opcode 0x9b. (aka fwait)
 * Checks for device-not-available and pending FPU exceptions, otherwise a
 * no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6439
6440
6441/**
6442 * @opcode 0x9c
6443 */
6444FNIEMOP_DEF(iemOp_pushf_Fv)
6445{
6446 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6448 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6449 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6450 iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6451}
6452
6453
6454/**
6455 * @opcode 0x9d
6456 */
6457FNIEMOP_DEF(iemOp_popf_Fv)
6458{
6459 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6460 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6461 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6462 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6463 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6464 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6465}
6466
6467
6468/**
6469 * @opcode 0x9e
6470 */
6471FNIEMOP_DEF(iemOp_sahf)
6472{
6473 IEMOP_MNEMONIC(sahf, "sahf");
6474 if ( IEM_IS_64BIT_CODE(pVCpu)
6475 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6476 IEMOP_RAISE_INVALID_OPCODE_RET();
6477 IEM_MC_BEGIN(0, 2, 0, 0);
6478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6479 IEM_MC_LOCAL(uint32_t, u32Flags);
6480 IEM_MC_LOCAL(uint32_t, EFlags);
6481 IEM_MC_FETCH_EFLAGS(EFlags);
6482 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6483 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6484 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6485 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6486 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6487 IEM_MC_COMMIT_EFLAGS(EFlags);
6488 IEM_MC_ADVANCE_RIP_AND_FINISH();
6489 IEM_MC_END();
6490}
6491
6492
6493/**
6494 * @opcode 0x9f
6495 */
6496FNIEMOP_DEF(iemOp_lahf)
6497{
6498 IEMOP_MNEMONIC(lahf, "lahf");
6499 if ( IEM_IS_64BIT_CODE(pVCpu)
6500 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6501 IEMOP_RAISE_INVALID_OPCODE_RET();
6502 IEM_MC_BEGIN(0, 1, 0, 0);
6503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6504 IEM_MC_LOCAL(uint8_t, u8Flags);
6505 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6506 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6507 IEM_MC_ADVANCE_RIP_AND_FINISH();
6508 IEM_MC_END();
6509}
6510
6511
6512/**
6513 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6514 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6515 * Will return/throw on failures.
6516 * @param a_GCPtrMemOff The variable to store the offset in.
6517 */
6518#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6519 do \
6520 { \
6521 switch (pVCpu->iem.s.enmEffAddrMode) \
6522 { \
6523 case IEMMODE_16BIT: \
6524 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6525 break; \
6526 case IEMMODE_32BIT: \
6527 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6528 break; \
6529 case IEMMODE_64BIT: \
6530 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6531 break; \
6532 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6533 } \
6534 } while (0)
6535
6536/**
6537 * @opcode 0xa0
6538 */
6539FNIEMOP_DEF(iemOp_mov_AL_Ob)
6540{
6541 /*
6542 * Get the offset.
6543 */
6544 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6545 RTGCPTR GCPtrMemOffDecode;
6546 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6547
6548 /*
6549 * Fetch AL.
6550 */
6551 IEM_MC_BEGIN(0, 2, 0, 0);
6552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6553 IEM_MC_LOCAL(uint8_t, u8Tmp);
6554 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6555 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6556 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6557 IEM_MC_ADVANCE_RIP_AND_FINISH();
6558 IEM_MC_END();
6559}
6560
6561
6562/**
6563 * @opcode 0xa1
6564 */
6565FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6566{
6567 /*
6568 * Get the offset.
6569 */
6570 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6571 RTGCPTR GCPtrMemOffDecode;
6572 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6573
6574 /*
6575 * Fetch rAX.
6576 */
6577 switch (pVCpu->iem.s.enmEffOpSize)
6578 {
6579 case IEMMODE_16BIT:
6580 IEM_MC_BEGIN(0, 2, 0, 0);
6581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6582 IEM_MC_LOCAL(uint16_t, u16Tmp);
6583 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6584 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6585 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6586 IEM_MC_ADVANCE_RIP_AND_FINISH();
6587 IEM_MC_END();
6588 break;
6589
6590 case IEMMODE_32BIT:
6591 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6593 IEM_MC_LOCAL(uint32_t, u32Tmp);
6594 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6595 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6596 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6597 IEM_MC_ADVANCE_RIP_AND_FINISH();
6598 IEM_MC_END();
6599 break;
6600
6601 case IEMMODE_64BIT:
6602 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6603 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6604 IEM_MC_LOCAL(uint64_t, u64Tmp);
6605 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6606 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6607 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6608 IEM_MC_ADVANCE_RIP_AND_FINISH();
6609 IEM_MC_END();
6610 break;
6611
6612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6613 }
6614}
6615
6616
6617/**
6618 * @opcode 0xa2
6619 */
6620FNIEMOP_DEF(iemOp_mov_Ob_AL)
6621{
6622 /*
6623 * Get the offset.
6624 */
6625 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6626 RTGCPTR GCPtrMemOffDecode;
6627 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6628
6629 /*
6630 * Store AL.
6631 */
6632 IEM_MC_BEGIN(0, 2, 0, 0);
6633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6634 IEM_MC_LOCAL(uint8_t, u8Tmp);
6635 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6636 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6637 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6638 IEM_MC_ADVANCE_RIP_AND_FINISH();
6639 IEM_MC_END();
6640}
6641
6642
6643/**
6644 * @opcode 0xa3
6645 */
6646FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6647{
6648 /*
6649 * Get the offset.
6650 */
6651 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6652 RTGCPTR GCPtrMemOffDecode;
6653 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6654
6655 /*
6656 * Store rAX.
6657 */
6658 switch (pVCpu->iem.s.enmEffOpSize)
6659 {
6660 case IEMMODE_16BIT:
6661 IEM_MC_BEGIN(0, 2, 0, 0);
6662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6663 IEM_MC_LOCAL(uint16_t, u16Tmp);
6664 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6665 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6666 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6667 IEM_MC_ADVANCE_RIP_AND_FINISH();
6668 IEM_MC_END();
6669 break;
6670
6671 case IEMMODE_32BIT:
6672 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6673 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6674 IEM_MC_LOCAL(uint32_t, u32Tmp);
6675 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6676 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6677 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6678 IEM_MC_ADVANCE_RIP_AND_FINISH();
6679 IEM_MC_END();
6680 break;
6681
6682 case IEMMODE_64BIT:
6683 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6685 IEM_MC_LOCAL(uint64_t, u64Tmp);
6686 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6687 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6688 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6689 IEM_MC_ADVANCE_RIP_AND_FINISH();
6690 IEM_MC_END();
6691 break;
6692
6693 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6694 }
6695}
6696
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits a single (non-REP) MOVS step: load from DS:xSI (segment-overridable),
 * store to ES:xDI, then step xSI/xDI by the operand size - down when EFLAGS.DF
 * is set, up otherwise. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6716
6717/**
6718 * @opcode 0xa4
6719 * @opfltest df
6720 */
6721FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6722{
6723 /*
6724 * Use the C implementation if a repeat prefix is encountered.
6725 */
6726 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6727 {
6728 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6730 switch (pVCpu->iem.s.enmEffAddrMode)
6731 {
6732 case IEMMODE_16BIT:
6733 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6734 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6735 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6736 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6737 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6738 case IEMMODE_32BIT:
6739 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6740 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6741 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6742 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6743 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6744 case IEMMODE_64BIT:
6745 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6746 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6747 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6748 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6749 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6750 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6751 }
6752 }
6753
6754 /*
6755 * Sharing case implementation with movs[wdq] below.
6756 */
6757 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6758 switch (pVCpu->iem.s.enmEffAddrMode)
6759 {
6760 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6761 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6762 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6763 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6764 }
6765}
6766
6767
6768/**
6769 * @opcode 0xa5
6770 * @opfltest df
6771 */
6772FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6773{
6774
6775 /*
6776 * Use the C implementation if a repeat prefix is encountered.
6777 */
6778 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6779 {
6780 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6782 switch (pVCpu->iem.s.enmEffOpSize)
6783 {
6784 case IEMMODE_16BIT:
6785 switch (pVCpu->iem.s.enmEffAddrMode)
6786 {
6787 case IEMMODE_16BIT:
6788 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6789 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6790 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6791 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6792 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6793 case IEMMODE_32BIT:
6794 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6795 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6796 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6797 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6798 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6799 case IEMMODE_64BIT:
6800 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6801 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6802 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6803 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6804 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6805 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6806 }
6807 break;
6808 case IEMMODE_32BIT:
6809 switch (pVCpu->iem.s.enmEffAddrMode)
6810 {
6811 case IEMMODE_16BIT:
6812 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6813 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6814 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6815 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6816 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6817 case IEMMODE_32BIT:
6818 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6819 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6820 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6821 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6822 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6823 case IEMMODE_64BIT:
6824 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6825 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6826 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6827 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6828 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6829 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6830 }
6831 case IEMMODE_64BIT:
6832 switch (pVCpu->iem.s.enmEffAddrMode)
6833 {
6834 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6835 case IEMMODE_32BIT:
6836 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6837 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6838 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6839 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6840 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6841 case IEMMODE_64BIT:
6842 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6843 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6844 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6845 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6846 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6847 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6848 }
6849 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6850 }
6851 }
6852
6853 /*
6854 * Annoying double switch here.
6855 * Using ugly macro for implementing the cases, sharing it with movsb.
6856 */
6857 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6858 switch (pVCpu->iem.s.enmEffOpSize)
6859 {
6860 case IEMMODE_16BIT:
6861 switch (pVCpu->iem.s.enmEffAddrMode)
6862 {
6863 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6864 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6865 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6866 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6867 }
6868 break;
6869
6870 case IEMMODE_32BIT:
6871 switch (pVCpu->iem.s.enmEffAddrMode)
6872 {
6873 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6874 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6875 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6876 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6877 }
6878 break;
6879
6880 case IEMMODE_64BIT:
6881 switch (pVCpu->iem.s.enmEffAddrMode)
6882 {
6883 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6884 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6885 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6886 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6887 }
6888 break;
6889 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6890 }
6891}
6892
6893#undef IEM_MOVS_CASE
6894
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits a single (non-REP) CMPS step: fetch from DS:xSI (segment-overridable)
 * and ES:xDI, compare via the cmp arithmetic worker (updates EFLAGS only),
 * then step xSI/xDI by the operand size per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr1); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
        \
        IEM_MC_LOCAL(RTGCPTR, uAddr2); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
        \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6924
6925/**
6926 * @opcode 0xa6
6927 * @opflclass arithmetic
6928 * @opfltest df
6929 */
6930FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6931{
6932
6933 /*
6934 * Use the C implementation if a repeat prefix is encountered.
6935 */
6936 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6937 {
6938 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6940 switch (pVCpu->iem.s.enmEffAddrMode)
6941 {
6942 case IEMMODE_16BIT:
6943 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6944 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6945 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6946 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6947 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6948 case IEMMODE_32BIT:
6949 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6950 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6951 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6952 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6953 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6954 case IEMMODE_64BIT:
6955 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6956 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6957 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6958 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6959 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6960 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6961 }
6962 }
6963 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
6964 {
6965 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
6966 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6967 switch (pVCpu->iem.s.enmEffAddrMode)
6968 {
6969 case IEMMODE_16BIT:
6970 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6971 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6972 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6973 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6974 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6975 case IEMMODE_32BIT:
6976 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6977 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6978 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6979 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6980 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6981 case IEMMODE_64BIT:
6982 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6983 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6984 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6985 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6986 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6987 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6988 }
6989 }
6990
6991 /*
6992 * Sharing case implementation with cmps[wdq] below.
6993 */
6994 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
6995 switch (pVCpu->iem.s.enmEffAddrMode)
6996 {
6997 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6998 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6999 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7000 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7001 }
7002}
7003
7004
7005/**
7006 * @opcode 0xa7
7007 * @opflclass arithmetic
7008 * @opfltest df
7009 */
7010FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7011{
7012 /*
7013 * Use the C implementation if a repeat prefix is encountered.
7014 */
7015 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7016 {
7017 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7019 switch (pVCpu->iem.s.enmEffOpSize)
7020 {
7021 case IEMMODE_16BIT:
7022 switch (pVCpu->iem.s.enmEffAddrMode)
7023 {
7024 case IEMMODE_16BIT:
7025 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7026 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7027 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7028 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7029 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7030 case IEMMODE_32BIT:
7031 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7032 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7033 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7034 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7035 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7036 case IEMMODE_64BIT:
7037 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7038 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7039 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7040 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7041 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7042 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7043 }
7044 break;
7045 case IEMMODE_32BIT:
7046 switch (pVCpu->iem.s.enmEffAddrMode)
7047 {
7048 case IEMMODE_16BIT:
7049 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7050 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7051 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7052 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7053 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7054 case IEMMODE_32BIT:
7055 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7056 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7057 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7058 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7059 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7060 case IEMMODE_64BIT:
7061 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7062 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7063 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7064 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7065 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7067 }
7068 case IEMMODE_64BIT:
7069 switch (pVCpu->iem.s.enmEffAddrMode)
7070 {
7071 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7072 case IEMMODE_32BIT:
7073 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7074 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7075 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7076 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7077 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7078 case IEMMODE_64BIT:
7079 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7080 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7081 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7082 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7083 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7084 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7085 }
7086 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7087 }
7088 }
7089
7090 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7091 {
7092 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7094 switch (pVCpu->iem.s.enmEffOpSize)
7095 {
7096 case IEMMODE_16BIT:
7097 switch (pVCpu->iem.s.enmEffAddrMode)
7098 {
7099 case IEMMODE_16BIT:
7100 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7101 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7102 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7103 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7104 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7105 case IEMMODE_32BIT:
7106 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7107 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7108 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7109 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7110 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7111 case IEMMODE_64BIT:
7112 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7113 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7114 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7115 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7116 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7117 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7118 }
7119 break;
7120 case IEMMODE_32BIT:
7121 switch (pVCpu->iem.s.enmEffAddrMode)
7122 {
7123 case IEMMODE_16BIT:
7124 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7125 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7126 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7127 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7128 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7129 case IEMMODE_32BIT:
7130 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7131 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7132 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7133 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7134 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7135 case IEMMODE_64BIT:
7136 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7137 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7138 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7139 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7140 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7141 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7142 }
7143 case IEMMODE_64BIT:
7144 switch (pVCpu->iem.s.enmEffAddrMode)
7145 {
7146 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7147 case IEMMODE_32BIT:
7148 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7149 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7150 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7151 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7152 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7153 case IEMMODE_64BIT:
7154 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7155 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7156 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7157 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7158 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7159 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7160 }
7161 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7162 }
7163 }
7164
7165 /*
7166 * Annoying double switch here.
7167 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7168 */
7169 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7170 switch (pVCpu->iem.s.enmEffOpSize)
7171 {
7172 case IEMMODE_16BIT:
7173 switch (pVCpu->iem.s.enmEffAddrMode)
7174 {
7175 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7176 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7177 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7178 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7179 }
7180 break;
7181
7182 case IEMMODE_32BIT:
7183 switch (pVCpu->iem.s.enmEffAddrMode)
7184 {
7185 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7186 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7187 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7188 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7189 }
7190 break;
7191
7192 case IEMMODE_64BIT:
7193 switch (pVCpu->iem.s.enmEffAddrMode)
7194 {
7195 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7196 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7197 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7198 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7199 }
7200 break;
7201 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7202 }
7203}
7204
7205#undef IEM_CMPS_CASE
7206
7207/**
7208 * @opcode 0xa8
7209 * @opflclass logical
7210 */
7211FNIEMOP_DEF(iemOp_test_AL_Ib)
7212{
7213 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7214 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7215 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
7216}
7217
7218
7219/**
7220 * @opcode 0xa9
7221 * @opflclass logical
7222 */
7223FNIEMOP_DEF(iemOp_test_eAX_Iz)
7224{
7225 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7226 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7227 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
7228}
7229
7230
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX to emit the inline
 * (non-REP) STOS microcode: stores the low ValBits of xAX to ES:[xDI], then
 * steps xDI by ValBits/8 - backwards when EFLAGS.DF is set, forwards
 * otherwise.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); /* destination is always ES, no prefix override. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7247
7248/**
7249 * @opcode 0xaa
7250 */
7251FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7252{
7253 /*
7254 * Use the C implementation if a repeat prefix is encountered.
7255 */
7256 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7257 {
7258 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7260 switch (pVCpu->iem.s.enmEffAddrMode)
7261 {
7262 case IEMMODE_16BIT:
7263 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7264 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7265 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7266 iemCImpl_stos_al_m16);
7267 case IEMMODE_32BIT:
7268 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7269 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7270 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7271 iemCImpl_stos_al_m32);
7272 case IEMMODE_64BIT:
7273 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7274 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7275 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7276 iemCImpl_stos_al_m64);
7277 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7278 }
7279 }
7280
7281 /*
7282 * Sharing case implementation with stos[wdq] below.
7283 */
7284 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7285 switch (pVCpu->iem.s.enmEffAddrMode)
7286 {
7287 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7288 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7289 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7290 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7291 }
7292}
7293
7294
7295/**
7296 * @opcode 0xab
7297 */
7298FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7299{
7300 /*
7301 * Use the C implementation if a repeat prefix is encountered.
7302 */
7303 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7304 {
7305 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7307 switch (pVCpu->iem.s.enmEffOpSize)
7308 {
7309 case IEMMODE_16BIT:
7310 switch (pVCpu->iem.s.enmEffAddrMode)
7311 {
7312 case IEMMODE_16BIT:
7313 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7314 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7315 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7316 iemCImpl_stos_ax_m16);
7317 case IEMMODE_32BIT:
7318 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7319 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7320 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7321 iemCImpl_stos_ax_m32);
7322 case IEMMODE_64BIT:
7323 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7324 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7325 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7326 iemCImpl_stos_ax_m64);
7327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7328 }
7329 break;
7330 case IEMMODE_32BIT:
7331 switch (pVCpu->iem.s.enmEffAddrMode)
7332 {
7333 case IEMMODE_16BIT:
7334 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7335 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7336 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7337 iemCImpl_stos_eax_m16);
7338 case IEMMODE_32BIT:
7339 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7340 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7341 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7342 iemCImpl_stos_eax_m32);
7343 case IEMMODE_64BIT:
7344 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7345 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7346 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7347 iemCImpl_stos_eax_m64);
7348 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7349 }
7350 case IEMMODE_64BIT:
7351 switch (pVCpu->iem.s.enmEffAddrMode)
7352 {
7353 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7354 case IEMMODE_32BIT:
7355 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7356 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7357 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7358 iemCImpl_stos_rax_m32);
7359 case IEMMODE_64BIT:
7360 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7361 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7362 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7363 iemCImpl_stos_rax_m64);
7364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7365 }
7366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7367 }
7368 }
7369
7370 /*
7371 * Annoying double switch here.
7372 * Using ugly macro for implementing the cases, sharing it with stosb.
7373 */
7374 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7375 switch (pVCpu->iem.s.enmEffOpSize)
7376 {
7377 case IEMMODE_16BIT:
7378 switch (pVCpu->iem.s.enmEffAddrMode)
7379 {
7380 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7381 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7382 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7383 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7384 }
7385 break;
7386
7387 case IEMMODE_32BIT:
7388 switch (pVCpu->iem.s.enmEffAddrMode)
7389 {
7390 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7391 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7392 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7393 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7394 }
7395 break;
7396
7397 case IEMMODE_64BIT:
7398 switch (pVCpu->iem.s.enmEffAddrMode)
7399 {
7400 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7401 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7402 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7404 }
7405 break;
7406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7407 }
7408}
7409
7410#undef IEM_STOS_CASE
7411
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv to emit the inline
 * (non-REP) LODS microcode: loads ValBits from iEffSeg:[xSI] into the low
 * part of xAX, then steps xSI by ValBits/8 - backwards when EFLAGS.DF is
 * set, forwards otherwise.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); /* source segment honours prefixes. */ \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7428
7429/**
7430 * @opcode 0xac
7431 * @opfltest df
7432 */
7433FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7434{
7435 /*
7436 * Use the C implementation if a repeat prefix is encountered.
7437 */
7438 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7439 {
7440 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7441 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7442 switch (pVCpu->iem.s.enmEffAddrMode)
7443 {
7444 case IEMMODE_16BIT:
7445 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7446 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7447 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7448 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7449 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7450 case IEMMODE_32BIT:
7451 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7452 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7453 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7454 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7455 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7456 case IEMMODE_64BIT:
7457 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7458 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7459 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7460 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7461 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7463 }
7464 }
7465
7466 /*
7467 * Sharing case implementation with stos[wdq] below.
7468 */
7469 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7470 switch (pVCpu->iem.s.enmEffAddrMode)
7471 {
7472 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7473 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7474 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7475 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7476 }
7477}
7478
7479
7480/**
7481 * @opcode 0xad
7482 * @opfltest df
7483 */
7484FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7485{
7486 /*
7487 * Use the C implementation if a repeat prefix is encountered.
7488 */
7489 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7490 {
7491 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7493 switch (pVCpu->iem.s.enmEffOpSize)
7494 {
7495 case IEMMODE_16BIT:
7496 switch (pVCpu->iem.s.enmEffAddrMode)
7497 {
7498 case IEMMODE_16BIT:
7499 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7500 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7501 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7502 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7503 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7504 case IEMMODE_32BIT:
7505 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7506 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7507 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7508 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7509 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7510 case IEMMODE_64BIT:
7511 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7512 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7513 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7514 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7515 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7516 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7517 }
7518 break;
7519 case IEMMODE_32BIT:
7520 switch (pVCpu->iem.s.enmEffAddrMode)
7521 {
7522 case IEMMODE_16BIT:
7523 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7524 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7525 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7526 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7527 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7528 case IEMMODE_32BIT:
7529 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7530 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7531 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7532 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7533 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7534 case IEMMODE_64BIT:
7535 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7536 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7537 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7538 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7539 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7541 }
7542 case IEMMODE_64BIT:
7543 switch (pVCpu->iem.s.enmEffAddrMode)
7544 {
7545 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7546 case IEMMODE_32BIT:
7547 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7548 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7549 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7550 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7551 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7552 case IEMMODE_64BIT:
7553 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7554 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7555 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7556 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7557 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7558 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7559 }
7560 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7561 }
7562 }
7563
7564 /*
7565 * Annoying double switch here.
7566 * Using ugly macro for implementing the cases, sharing it with lodsb.
7567 */
7568 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7569 switch (pVCpu->iem.s.enmEffOpSize)
7570 {
7571 case IEMMODE_16BIT:
7572 switch (pVCpu->iem.s.enmEffAddrMode)
7573 {
7574 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7575 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7576 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7577 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7578 }
7579 break;
7580
7581 case IEMMODE_32BIT:
7582 switch (pVCpu->iem.s.enmEffAddrMode)
7583 {
7584 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7585 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7586 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7587 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7588 }
7589 break;
7590
7591 case IEMMODE_64BIT:
7592 switch (pVCpu->iem.s.enmEffAddrMode)
7593 {
7594 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7595 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7596 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7597 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7598 }
7599 break;
7600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7601 }
7602}
7603
7604#undef IEM_LODS_CASE
7605
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv to emit the inline
 * (non-REP) SCAS microcode: compares the low ValBits of xAX against the
 * value at ES:[xDI] via iemAImpl_cmp_uNN (EFLAGS only, xAX unchanged by
 * cmp), then steps xDI by ValBits/8 - backwards when EFLAGS.DF is set,
 * forwards otherwise.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); /* always ES, no prefix override. */ \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7628
7629/**
7630 * @opcode 0xae
7631 * @opflclass arithmetic
7632 * @opfltest df
7633 */
7634FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7635{
7636 /*
7637 * Use the C implementation if a repeat prefix is encountered.
7638 */
7639 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7640 {
7641 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7643 switch (pVCpu->iem.s.enmEffAddrMode)
7644 {
7645 case IEMMODE_16BIT:
7646 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7647 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7648 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7649 iemCImpl_repe_scas_al_m16);
7650 case IEMMODE_32BIT:
7651 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7652 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7653 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7654 iemCImpl_repe_scas_al_m32);
7655 case IEMMODE_64BIT:
7656 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7657 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7658 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7659 iemCImpl_repe_scas_al_m64);
7660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7661 }
7662 }
7663 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7664 {
7665 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7667 switch (pVCpu->iem.s.enmEffAddrMode)
7668 {
7669 case IEMMODE_16BIT:
7670 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7671 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7672 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7673 iemCImpl_repne_scas_al_m16);
7674 case IEMMODE_32BIT:
7675 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7676 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7677 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7678 iemCImpl_repne_scas_al_m32);
7679 case IEMMODE_64BIT:
7680 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7681 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7682 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7683 iemCImpl_repne_scas_al_m64);
7684 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7685 }
7686 }
7687
7688 /*
7689 * Sharing case implementation with stos[wdq] below.
7690 */
7691 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7692 switch (pVCpu->iem.s.enmEffAddrMode)
7693 {
7694 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7695 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7696 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7698 }
7699}
7700
7701
7702/**
7703 * @opcode 0xaf
7704 * @opflclass arithmetic
7705 * @opfltest df
7706 */
7707FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
7708{
7709 /*
7710 * Use the C implementation if a repeat prefix is encountered.
7711 */
7712 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7713 {
7714 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
7715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7716 switch (pVCpu->iem.s.enmEffOpSize)
7717 {
7718 case IEMMODE_16BIT:
7719 switch (pVCpu->iem.s.enmEffAddrMode)
7720 {
7721 case IEMMODE_16BIT:
7722 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7723 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7724 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7725 iemCImpl_repe_scas_ax_m16);
7726 case IEMMODE_32BIT:
7727 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7728 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7729 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7730 iemCImpl_repe_scas_ax_m32);
7731 case IEMMODE_64BIT:
7732 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7733 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7734 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7735 iemCImpl_repe_scas_ax_m64);
7736 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7737 }
7738 break;
7739 case IEMMODE_32BIT:
7740 switch (pVCpu->iem.s.enmEffAddrMode)
7741 {
7742 case IEMMODE_16BIT:
7743 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7744 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7745 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7746 iemCImpl_repe_scas_eax_m16);
7747 case IEMMODE_32BIT:
7748 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7749 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7750 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7751 iemCImpl_repe_scas_eax_m32);
7752 case IEMMODE_64BIT:
7753 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7754 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7755 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7756 iemCImpl_repe_scas_eax_m64);
7757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7758 }
7759 case IEMMODE_64BIT:
7760 switch (pVCpu->iem.s.enmEffAddrMode)
7761 {
7762 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
7763 case IEMMODE_32BIT:
7764 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7765 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7766 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7767 iemCImpl_repe_scas_rax_m32);
7768 case IEMMODE_64BIT:
7769 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7770 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7771 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7772 iemCImpl_repe_scas_rax_m64);
7773 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7774 }
7775 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7776 }
7777 }
7778 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7779 {
7780 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
7781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7782 switch (pVCpu->iem.s.enmEffOpSize)
7783 {
7784 case IEMMODE_16BIT:
7785 switch (pVCpu->iem.s.enmEffAddrMode)
7786 {
7787 case IEMMODE_16BIT:
7788 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7789 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7790 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7791 iemCImpl_repne_scas_ax_m16);
7792 case IEMMODE_32BIT:
7793 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7794 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7795 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7796 iemCImpl_repne_scas_ax_m32);
7797 case IEMMODE_64BIT:
7798 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7799 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7800 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7801 iemCImpl_repne_scas_ax_m64);
7802 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7803 }
7804 break;
7805 case IEMMODE_32BIT:
7806 switch (pVCpu->iem.s.enmEffAddrMode)
7807 {
7808 case IEMMODE_16BIT:
7809 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7810 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7811 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7812 iemCImpl_repne_scas_eax_m16);
7813 case IEMMODE_32BIT:
7814 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7815 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7816 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7817 iemCImpl_repne_scas_eax_m32);
7818 case IEMMODE_64BIT:
7819 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7820 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7821 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7822 iemCImpl_repne_scas_eax_m64);
7823 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7824 }
7825 case IEMMODE_64BIT:
7826 switch (pVCpu->iem.s.enmEffAddrMode)
7827 {
7828 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
7829 case IEMMODE_32BIT:
7830 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7831 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7832 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7833 iemCImpl_repne_scas_rax_m32);
7834 case IEMMODE_64BIT:
7835 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7836 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7837 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7838 iemCImpl_repne_scas_rax_m64);
7839 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7840 }
7841 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7842 }
7843 }
7844
7845 /*
7846 * Annoying double switch here.
7847 * Using ugly macro for implementing the cases, sharing it with scasb.
7848 */
7849 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
7850 switch (pVCpu->iem.s.enmEffOpSize)
7851 {
7852 case IEMMODE_16BIT:
7853 switch (pVCpu->iem.s.enmEffAddrMode)
7854 {
7855 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7856 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7857 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
7858 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7859 }
7860 break;
7861
7862 case IEMMODE_32BIT:
7863 switch (pVCpu->iem.s.enmEffAddrMode)
7864 {
7865 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7866 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7867 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
7868 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7869 }
7870 break;
7871
7872 case IEMMODE_64BIT:
7873 switch (pVCpu->iem.s.enmEffAddrMode)
7874 {
7875 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7876 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
7877 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
7878 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7879 }
7880 break;
7881 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7882 }
7883}
7884
7885#undef IEM_SCAS_CASE
7886
7887/**
7888 * Common 'mov r8, imm8' helper.
7889 */
7890FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7891{
7892 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7893 IEM_MC_BEGIN(0, 0, 0, 0);
7894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7895 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7896 IEM_MC_ADVANCE_RIP_AND_FINISH();
7897 IEM_MC_END();
7898}
7899
7900
7901/**
7902 * @opcode 0xb0
7903 */
7904FNIEMOP_DEF(iemOp_mov_AL_Ib)
7905{
7906 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7907 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7908}
7909
7910
7911/**
7912 * @opcode 0xb1
7913 */
7914FNIEMOP_DEF(iemOp_CL_Ib)
7915{
7916 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7917 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7918}
7919
7920
7921/**
7922 * @opcode 0xb2
7923 */
7924FNIEMOP_DEF(iemOp_DL_Ib)
7925{
7926 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7927 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7928}
7929
7930
7931/**
7932 * @opcode 0xb3
7933 */
7934FNIEMOP_DEF(iemOp_BL_Ib)
7935{
7936 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7937 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
7938}
7939
7940
7941/**
7942 * @opcode 0xb4
7943 */
7944FNIEMOP_DEF(iemOp_mov_AH_Ib)
7945{
7946 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
7947 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
7948}
7949
7950
7951/**
7952 * @opcode 0xb5
7953 */
7954FNIEMOP_DEF(iemOp_CH_Ib)
7955{
7956 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
7957 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
7958}
7959
7960
7961/**
7962 * @opcode 0xb6
7963 */
7964FNIEMOP_DEF(iemOp_DH_Ib)
7965{
7966 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
7967 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
7968}
7969
7970
7971/**
7972 * @opcode 0xb7
7973 */
7974FNIEMOP_DEF(iemOp_BH_Ib)
7975{
7976 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
7977 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
7978}
7979
7980
7981/**
7982 * Common 'mov regX,immX' helper.
7983 */
7984FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
7985{
7986 switch (pVCpu->iem.s.enmEffOpSize)
7987 {
7988 case IEMMODE_16BIT:
7989 IEM_MC_BEGIN(0, 0, 0, 0);
7990 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
7991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7992 IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
7993 IEM_MC_ADVANCE_RIP_AND_FINISH();
7994 IEM_MC_END();
7995 break;
7996
7997 case IEMMODE_32BIT:
7998 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
7999 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8000 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8001 IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
8002 IEM_MC_ADVANCE_RIP_AND_FINISH();
8003 IEM_MC_END();
8004 break;
8005
8006 case IEMMODE_64BIT:
8007 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8008 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
8009 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8010 IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
8011 IEM_MC_ADVANCE_RIP_AND_FINISH();
8012 IEM_MC_END();
8013 break;
8014 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8015 }
8016}
8017
8018
8019/**
8020 * @opcode 0xb8
8021 */
8022FNIEMOP_DEF(iemOp_eAX_Iv)
8023{
8024 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8025 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8026}
8027
8028
8029/**
8030 * @opcode 0xb9
8031 */
8032FNIEMOP_DEF(iemOp_eCX_Iv)
8033{
8034 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8035 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8036}
8037
8038
8039/**
8040 * @opcode 0xba
8041 */
8042FNIEMOP_DEF(iemOp_eDX_Iv)
8043{
8044 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8045 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8046}
8047
8048
8049/**
8050 * @opcode 0xbb
8051 */
8052FNIEMOP_DEF(iemOp_eBX_Iv)
8053{
8054 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8055 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8056}
8057
8058
8059/**
8060 * @opcode 0xbc
8061 */
8062FNIEMOP_DEF(iemOp_eSP_Iv)
8063{
8064 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8065 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8066}
8067
8068
8069/**
8070 * @opcode 0xbd
8071 */
8072FNIEMOP_DEF(iemOp_eBP_Iv)
8073{
8074 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8075 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8076}
8077
8078
8079/**
8080 * @opcode 0xbe
8081 */
8082FNIEMOP_DEF(iemOp_eSI_Iv)
8083{
8084 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8085 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8086}
8087
8088
8089/**
8090 * @opcode 0xbf
8091 */
8092FNIEMOP_DEF(iemOp_eDI_Iv)
8093{
8094 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8095 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8096}
8097
8098
8099/**
8100 * @opcode 0xc0
8101 */
8102FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
8103{
8104 IEMOP_HLP_MIN_186();
8105 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8106
8107 /* Need to use a body macro here since the EFLAGS behaviour differs between
8108 the shifts, rotates and rotate w/ carry. Sigh. */
8109#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
8110 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8111 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8112 { \
8113 /* register */ \
8114 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8115 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
8116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8117 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8118 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8119 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8120 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8121 IEM_MC_REF_EFLAGS(pEFlags); \
8122 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8123 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8124 IEM_MC_END(); \
8125 } \
8126 else \
8127 { \
8128 /* memory */ \
8129 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
8130 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8131 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8132 \
8133 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8135 \
8136 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8137 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8138 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8139 \
8140 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8141 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8142 IEM_MC_FETCH_EFLAGS(EFlags); \
8143 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8144 \
8145 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8146 IEM_MC_COMMIT_EFLAGS(EFlags); \
8147 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8148 IEM_MC_END(); \
8149 } (void)0
8150
8151 switch (IEM_GET_MODRM_REG_8(bRm))
8152 {
8153 /**
8154 * @opdone
8155 * @opmaps grp2_c0
8156 * @opcode /0
8157 * @opflclass rotate_count
8158 */
8159 case 0:
8160 {
8161 IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8162 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8163 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8164 break;
8165 }
8166 /**
8167 * @opdone
8168 * @opmaps grp2_c0
8169 * @opcode /1
8170 * @opflclass rotate_count
8171 */
8172 case 1:
8173 {
8174 IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8175 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8176 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8177 break;
8178 }
8179 /**
8180 * @opdone
8181 * @opmaps grp2_c0
8182 * @opcode /2
8183 * @opflclass rotate_carry_count
8184 */
8185 case 2:
8186 {
8187 IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8188 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8189 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8190 break;
8191 }
8192 /**
8193 * @opdone
8194 * @opmaps grp2_c0
8195 * @opcode /3
8196 * @opflclass rotate_carry_count
8197 */
8198 case 3:
8199 {
8200 IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8201 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
8202 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8203 break;
8204 }
8205 /**
8206 * @opdone
8207 * @opmaps grp2_c0
8208 * @opcode /4
8209 * @opflclass shift_count
8210 */
8211 case 4:
8212 {
8213 IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8214 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8215 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8216 break;
8217 }
8218 /**
8219 * @opdone
8220 * @opmaps grp2_c0
8221 * @opcode /5
8222 * @opflclass shift_count
8223 */
8224 case 5:
8225 {
8226 IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8227 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8228 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8229 break;
8230 }
8231 /**
8232 * @opdone
8233 * @opmaps grp2_c0
8234 * @opcode /7
8235 * @opflclass shift_count
8236 */
8237 case 7:
8238 {
8239 IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
8240 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
8241 GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8242 break;
8243 }
8244
8245 /** @opdone */
8246 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8247 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8248 }
8249#undef GRP2_BODY_Eb_Ib
8250}
8251
8252
8253/**
8254 * @opcode 0xc1
8255 */
8256FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
8257{
8258 IEMOP_HLP_MIN_186();
8259 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8260
8261 /* Need to use a body macro here since the EFLAGS behaviour differs between
8262 the shifts, rotates and rotate w/ carry. Sigh. */
8263#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
8264 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8265 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8266 { \
8267 /* register */ \
8268 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8269 switch (pVCpu->iem.s.enmEffOpSize) \
8270 { \
8271 case IEMMODE_16BIT: \
8272 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
8273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8274 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8275 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8276 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8277 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8278 IEM_MC_REF_EFLAGS(pEFlags); \
8279 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
8280 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8281 IEM_MC_END(); \
8282 break; \
8283 \
8284 case IEMMODE_32BIT: \
8285 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
8286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8287 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8288 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8289 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8290 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8291 IEM_MC_REF_EFLAGS(pEFlags); \
8292 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
8293 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
8294 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8295 IEM_MC_END(); \
8296 break; \
8297 \
8298 case IEMMODE_64BIT: \
8299 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
8300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8301 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8302 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8303 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8304 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8305 IEM_MC_REF_EFLAGS(pEFlags); \
8306 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
8307 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8308 IEM_MC_END(); \
8309 break; \
8310 \
8311 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8312 } \
8313 } \
8314 else \
8315 { \
8316 /* memory */ \
8317 switch (pVCpu->iem.s.enmEffOpSize) \
8318 { \
8319 case IEMMODE_16BIT: \
8320 IEM_MC_BEGIN(3, 3, 0, 0); \
8321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8323 \
8324 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8326 \
8327 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8328 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
8329 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8330 \
8331 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8332 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8333 IEM_MC_FETCH_EFLAGS(EFlags); \
8334 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
8335 \
8336 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8337 IEM_MC_COMMIT_EFLAGS(EFlags); \
8338 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8339 IEM_MC_END(); \
8340 break; \
8341 \
8342 case IEMMODE_32BIT: \
8343 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
8344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8345 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8346 \
8347 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8349 \
8350 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8351 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
8352 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8353 \
8354 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8355 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8356 IEM_MC_FETCH_EFLAGS(EFlags); \
8357 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
8358 \
8359 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8360 IEM_MC_COMMIT_EFLAGS(EFlags); \
8361 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8362 IEM_MC_END(); \
8363 break; \
8364 \
8365 case IEMMODE_64BIT: \
8366 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
8367 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8368 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
8369 \
8370 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
8371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8372 \
8373 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8374 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
8375 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8376 \
8377 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
8378 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
8379 IEM_MC_FETCH_EFLAGS(EFlags); \
8380 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
8381 \
8382 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8383 IEM_MC_COMMIT_EFLAGS(EFlags); \
8384 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8385 IEM_MC_END(); \
8386 break; \
8387 \
8388 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
8389 } \
8390 } (void)0
8391
8392 switch (IEM_GET_MODRM_REG_8(bRm))
8393 {
8394 /**
8395 * @opdone
8396 * @opmaps grp2_c1
8397 * @opcode /0
8398 * @opflclass rotate_count
8399 */
8400 case 0:
8401 {
8402 IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8403 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
8404 break;
8405 }
8406 /**
8407 * @opdone
8408 * @opmaps grp2_c1
8409 * @opcode /1
8410 * @opflclass rotate_count
8411 */
8412 case 1:
8413 {
8414 IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8415 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
8416 break;
8417 }
8418 /**
8419 * @opdone
8420 * @opmaps grp2_c1
8421 * @opcode /2
8422 * @opflclass rotate_carry_count
8423 */
8424 case 2:
8425 {
8426 IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8427 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
8428 break;
8429 }
8430 /**
8431 * @opdone
8432 * @opmaps grp2_c1
8433 * @opcode /3
8434 * @opflclass rotate_carry_count
8435 */
8436 case 3:
8437 {
8438 IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8439 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
8440 break;
8441 }
8442 /**
8443 * @opdone
8444 * @opmaps grp2_c1
8445 * @opcode /4
8446 * @opflclass shift_count
8447 */
8448 case 4:
8449 {
8450 IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8451 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
8452 break;
8453 }
8454 /**
8455 * @opdone
8456 * @opmaps grp2_c1
8457 * @opcode /5
8458 * @opflclass shift_count
8459 */
8460 case 5:
8461 {
8462 IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8463 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
8464 break;
8465 }
8466 /**
8467 * @opdone
8468 * @opmaps grp2_c1
8469 * @opcode /7
8470 * @opflclass shift_count
8471 */
8472 case 7:
8473 {
8474 IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
8475 GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
8476 break;
8477 }
8478
8479 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8480 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
8481 }
8482#undef GRP2_BODY_Ev_Ib
8483}
8484
8485
8486/**
8487 * @opcode 0xc2
8488 */
8489FNIEMOP_DEF(iemOp_retn_Iw)
8490{
8491 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
8492 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8493 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8494 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8495 switch (pVCpu->iem.s.enmEffOpSize)
8496 {
8497 case IEMMODE_16BIT:
8498 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8499 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
8500 case IEMMODE_32BIT:
8501 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8502 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
8503 case IEMMODE_64BIT:
8504 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8505 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
8506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8507 }
8508}
8509
8510
8511/**
8512 * @opcode 0xc3
8513 */
8514FNIEMOP_DEF(iemOp_retn)
8515{
8516 IEMOP_MNEMONIC(retn, "retn");
8517 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
8518 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8519 switch (pVCpu->iem.s.enmEffOpSize)
8520 {
8521 case IEMMODE_16BIT:
8522 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8523 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
8524 case IEMMODE_32BIT:
8525 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8526 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
8527 case IEMMODE_64BIT:
8528 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
8529 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
8530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8531 }
8532}
8533
8534
8535/**
8536 * @opcode 0xc4
8537 */
8538FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
8539{
8540 /* The LDS instruction is invalid 64-bit mode. In legacy and
8541 compatability mode it is invalid with MOD=3.
8542 The use as a VEX prefix is made possible by assigning the inverted
8543 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
8544 outside of 64-bit mode. VEX is not available in real or v86 mode. */
8545 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8546 if ( IEM_IS_64BIT_CODE(pVCpu)
8547 || IEM_IS_MODRM_REG_MODE(bRm) )
8548 {
8549 IEMOP_MNEMONIC(vex3_prefix, "vex3");
8550 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8551 {
8552 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8553 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8554 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
8555 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8556 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8557 if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
8558 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
8559 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8560 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
8561 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
8562 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
8563 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
8564 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
8565
8566 switch (bRm & 0x1f)
8567 {
8568 case 1: /* 0x0f lead opcode byte. */
8569#ifdef IEM_WITH_VEX
8570 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8571#else
8572 IEMOP_BITCH_ABOUT_STUB();
8573 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8574#endif
8575
8576 case 2: /* 0x0f 0x38 lead opcode bytes. */
8577#ifdef IEM_WITH_VEX
8578 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8579#else
8580 IEMOP_BITCH_ABOUT_STUB();
8581 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8582#endif
8583
8584 case 3: /* 0x0f 0x3a lead opcode bytes. */
8585#ifdef IEM_WITH_VEX
8586 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8587#else
8588 IEMOP_BITCH_ABOUT_STUB();
8589 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8590#endif
8591
8592 default:
8593 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
8594 IEMOP_RAISE_INVALID_OPCODE_RET();
8595 }
8596 }
8597 Log(("VEX3: VEX support disabled!\n"));
8598 IEMOP_RAISE_INVALID_OPCODE_RET();
8599 }
8600
8601 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
8602 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
8603}
8604
8605
8606/**
8607 * @opcode 0xc5
8608 */
8609FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
8610{
8611 /* The LES instruction is invalid 64-bit mode. In legacy and
8612 compatability mode it is invalid with MOD=3.
8613 The use as a VEX prefix is made possible by assigning the inverted
8614 REX.R to the top MOD bit, and the top bit in the inverted register
8615 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
8616 to accessing registers 0..7 in this VEX form. */
8617 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8618 if ( IEM_IS_64BIT_CODE(pVCpu)
8619 || IEM_IS_MODRM_REG_MODE(bRm))
8620 {
8621 IEMOP_MNEMONIC(vex2_prefix, "vex2");
8622 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
8623 {
8624 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
8625 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
8626 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
8627 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
8628 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
8629 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
8630 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
8631 pVCpu->iem.s.idxPrefix = bRm & 0x3;
8632
8633#ifdef IEM_WITH_VEX
8634 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
8635#else
8636 IEMOP_BITCH_ABOUT_STUB();
8637 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
8638#endif
8639 }
8640
8641 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
8642 Log(("VEX2: VEX support disabled!\n"));
8643 IEMOP_RAISE_INVALID_OPCODE_RET();
8644 }
8645
8646 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
8647 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
8648}
8649
8650
8651/**
8652 * @opcode 0xc6
8653 */
8654FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
8655{
8656 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8657 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
8658 IEMOP_RAISE_INVALID_OPCODE_RET();
8659 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
8660
8661 if (IEM_IS_MODRM_REG_MODE(bRm))
8662 {
8663 /* register access */
8664 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8665 IEM_MC_BEGIN(0, 0, 0, 0);
8666 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8667 IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
8668 IEM_MC_ADVANCE_RIP_AND_FINISH();
8669 IEM_MC_END();
8670 }
8671 else
8672 {
8673 /* memory access. */
8674 IEM_MC_BEGIN(0, 1, 0, 0);
8675 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8676 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
8677 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
8678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8679 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
8680 IEM_MC_ADVANCE_RIP_AND_FINISH();
8681 IEM_MC_END();
8682 }
8683}
8684
8685
8686/**
8687 * @opcode 0xc7
8688 */
8689FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8690{
8691 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8692 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
8693 IEMOP_RAISE_INVALID_OPCODE_RET();
8694 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8695
8696 if (IEM_IS_MODRM_REG_MODE(bRm))
8697 {
8698 /* register access */
8699 switch (pVCpu->iem.s.enmEffOpSize)
8700 {
8701 case IEMMODE_16BIT:
8702 IEM_MC_BEGIN(0, 0, 0, 0);
8703 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8705 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8706 IEM_MC_ADVANCE_RIP_AND_FINISH();
8707 IEM_MC_END();
8708 break;
8709
8710 case IEMMODE_32BIT:
8711 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8712 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8713 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8714 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8715 IEM_MC_ADVANCE_RIP_AND_FINISH();
8716 IEM_MC_END();
8717 break;
8718
8719 case IEMMODE_64BIT:
8720 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8721 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8722 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8723 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8724 IEM_MC_ADVANCE_RIP_AND_FINISH();
8725 IEM_MC_END();
8726 break;
8727
8728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8729 }
8730 }
8731 else
8732 {
8733 /* memory access. */
8734 switch (pVCpu->iem.s.enmEffOpSize)
8735 {
8736 case IEMMODE_16BIT:
8737 IEM_MC_BEGIN(0, 1, 0, 0);
8738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8740 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8742 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8743 IEM_MC_ADVANCE_RIP_AND_FINISH();
8744 IEM_MC_END();
8745 break;
8746
8747 case IEMMODE_32BIT:
8748 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8750 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8751 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8753 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8754 IEM_MC_ADVANCE_RIP_AND_FINISH();
8755 IEM_MC_END();
8756 break;
8757
8758 case IEMMODE_64BIT:
8759 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8760 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8761 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8762 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8763 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8764 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8765 IEM_MC_ADVANCE_RIP_AND_FINISH();
8766 IEM_MC_END();
8767 break;
8768
8769 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8770 }
8771 }
8772}
8773
8774
8775
8776
8777/**
8778 * @opcode 0xc8
8779 */
8780FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8781{
8782 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8783 IEMOP_HLP_MIN_186();
8784 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8785 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8786 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8787 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8788 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
8789 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8790 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8791 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8792}
8793
8794
8795/**
8796 * @opcode 0xc9
 *
 * LEAVE - tear down the stack frame created by ENTER (xSP := xBP, then pop
 * xBP).  Deferred to iemCImpl_leave; xSP and xBP are flagged as modified for
 * the native recompiler.
8797 */
8798FNIEMOP_DEF(iemOp_leave)
8799{
8800 IEMOP_MNEMONIC(leave, "leave");
8801 IEMOP_HLP_MIN_186(); /* Introduced together with ENTER on the 80186. */
8802 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8804 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
8805 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8806 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8807 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8808}
8809
8810
8811/**
8812 * @opcode 0xca
 *
 * RETF Iw - far return, additionally popping Iw bytes of arguments off the
 * stack.  Deferred to iemCImpl_retf.  A far return can change CPL/mode, in
 * which case DS/ES/FS/GS may be sanitized on the way out; hence the long
 * list of segment selector/base/limit/attribute registers flagged as
 * modified below.
8813 */
8814FNIEMOP_DEF(iemOp_retf_Iw)
8815{
8816 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8817 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8819 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8820 | IEM_CIMPL_F_MODE,
8821 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8822 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8823 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8824 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8825 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8826 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8827 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8828 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8829 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8830 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8831 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8832 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8833 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
8834 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
8835 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
8836 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
8837 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
8838 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8839}
8840
8841
8842/**
8843 * @opcode 0xcb
 *
 * RETF - far return without popping extra argument bytes; identical to the
 * 0xca encoding with an immediate of zero (note the trailing 0 passed to
 * iemCImpl_retf).  Same mode-change / segment sanitizing considerations as
 * 0xca, hence the same flush list.
8844 */
8845FNIEMOP_DEF(iemOp_retf)
8846{
8847 IEMOP_MNEMONIC(retf, "retf");
8848 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8849 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8850 | IEM_CIMPL_F_MODE,
8851 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8852 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8853 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8854 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8855 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8856 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8857 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8858 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8859 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8860 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8861 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8862 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8863 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
8864 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
8865 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
8866 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
8867 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
8868 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8869}
8870
8871
8872/**
8873 * @opcode 0xcc
 *
 * INT3 - breakpoint trap; dispatches vector 3 (#BP) via iemCImpl_int with
 * the IEMINT_INT3 source so the CPL/gate checks match the one-byte
 * encoding.  Flagged IEM_CIMPL_F_END_TB to stop translation-block
 * compilation at the breakpoint.
8874 */
8875FNIEMOP_DEF(iemOp_int3)
8876{
8877 IEMOP_MNEMONIC(int3, "int3");
8878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8879 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8880 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
8881 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8882}
8883
8884
8885/**
8886 * @opcode 0xcd
 *
 * INT Ib - software interrupt through vector Ib; deferred to iemCImpl_int
 * with the IEMINT_INTN source.  A UINT64_MAX register mask is used since a
 * task switch / ring transition can touch just about any guest register.
8887 */
8888FNIEMOP_DEF(iemOp_int_Ib)
8889{
8890 IEMOP_MNEMONIC(int_Ib, "int Ib");
8891 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8893 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8894 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
8895 iemCImpl_int, u8Int, IEMINT_INTN);
8896 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8897}
8898
8899
8900/**
8901 * @opcode 0xce
8902 */
8903FNIEMOP_DEF(iemOp_into)
8904{
8905 IEMOP_MNEMONIC(into, "into");
8906 IEMOP_HLP_NO_64BIT();
8907 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8908 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8909 UINT64_MAX,
8910 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8911 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8912}
8913
8914
8915/**
8916 * @opcode 0xcf
 *
 * IRET - return from interrupt; deferred to iemCImpl_iret.  Checks pending
 * IRQs before executing (IEM_CIMPL_F_CHECK_IRQ_BEFORE) since IRET is a
 * common interrupt-window instruction.
8917 */
8918FNIEMOP_DEF(iemOp_iret)
8919{
8920 IEMOP_MNEMONIC(iret, "iret");
8921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8922 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8923 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
8924 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8925 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8926 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8927 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8928 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
8929 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8930 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8931 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8932 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
8933 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8934 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8935 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8936 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
8937 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8938 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8939 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
8940 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
8941 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
8942 /* Segment registers are sanitized when returning to an outer ring, or fully
8943 reloaded when returning to v86 mode. Thus the large flush list above. */
8944}
8945
8946
8947/**
8948 * @opcode 0xd0
 *
 * Group 2 dispatcher: shift/rotate Eb by a constant count of 1.  The ModR/M
 * reg field selects the operation (/0 ROL .. /7 SAR, /6 invalid); the shared
 * body macro handles the register vs. memory destination forms and the
 * read-modify-write of EFLAGS.
8949 */
8950FNIEMOP_DEF(iemOp_Grp2_Eb_1)
8951{
8952 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8953
8954 /* Need to use a body macro here since the EFLAGS behaviour differs between
8955 the shifts, rotates and rotate w/ carry. Sigh. */
8956#define GRP2_BODY_Eb_1(a_pImplExpr) \
8957 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
8958 if (IEM_IS_MODRM_REG_MODE(bRm)) \
8959 { \
8960 /* register */ \
8961 IEM_MC_BEGIN(3, 0, 0, 0); \
8962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8963 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8964 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
8965 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
8966 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
8967 IEM_MC_REF_EFLAGS(pEFlags); \
8968 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8969 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8970 IEM_MC_END(); \
8971 } \
8972 else \
8973 { \
8974 /* memory */ \
8975 IEM_MC_BEGIN(3, 3, 0, 0); \
8976 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
8977 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
8978 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
8979 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
8980 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
8981 \
8982 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
8983 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
8984 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
8985 IEM_MC_FETCH_EFLAGS(EFlags); \
8986 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
8987 \
8988 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
8989 IEM_MC_COMMIT_EFLAGS(EFlags); \
8990 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
8991 IEM_MC_END(); \
8992 } (void)0
8993
8994 switch (IEM_GET_MODRM_REG_8(bRm))
8995 {
8996 /**
8997 * @opdone
8998 * @opmaps grp2_d0
8999 * @opcode /0
9000 * @opflclass rotate_1
9001 */
9002 case 0:
9003 {
9004 IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
9005 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9006 break;
9007 }
9008 /**
9009 * @opdone
9010 * @opmaps grp2_d0
9011 * @opcode /1
9012 * @opflclass rotate_1
9013 */
9014 case 1:
9015 {
9016 IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
9017 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9018 break;
9019 }
9020 /**
9021 * @opdone
9022 * @opmaps grp2_d0
9023 * @opcode /2
9024 * @opflclass rotate_carry_1
9025 */
9026 case 2:
9027 {
9028 IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9029 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9030 break;
9031 }
9032 /**
9033 * @opdone
9034 * @opmaps grp2_d0
9035 * @opcode /3
9036 * @opflclass rotate_carry_1
9037 */
9038 case 3:
9039 {
9040 IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9041 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9042 break;
9043 }
9044 /**
9045 * @opdone
9046 * @opmaps grp2_d0
9047 * @opcode /4
9048 * @opflclass shift_1
9049 */
9050 case 4:
9051 {
9052 IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9053 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9054 break;
9055 }
9056 /**
9057 * @opdone
9058 * @opmaps grp2_d0
9059 * @opcode /5
9060 * @opflclass shift_1
9061 */
9062 case 5:
9063 {
9064 IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9065 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9066 break;
9067 }
9068 /**
9069 * @opdone
9070 * @opmaps grp2_d0
9071 * @opcode /7
9072 * @opflclass shift_1
9073 */
9074 case 7:
9075 {
9076 IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
9077 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9078 break;
9079 }
9080 /** @opdone */
9081 case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 (SAL alias slot) is undefined in group 2. */
9082 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9083 }
9084#undef GRP2_BODY_Eb_1
9085}
9086
9087
9088
9089/**
9090 * @opcode 0xd1
 *
 * Group 2 dispatcher: shift/rotate Ev (16/32/64-bit per effective operand
 * size) by a constant count of 1.  ModR/M reg selects the operation
 * (/0 ROL .. /7 SAR, /6 invalid).  The body macro expands register and
 * memory destination variants for all three operand sizes; the 32-bit
 * register form clears the high dword per the usual AMD64 rule.
9091 */
9092FNIEMOP_DEF(iemOp_Grp2_Ev_1)
9093{
9094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9095
9096 /* Need to use a body macro here since the EFLAGS behaviour differs between
9097 the shifts, rotates and rotate w/ carry. Sigh. */
9098#define GRP2_BODY_Ev_1(a_pImplExpr) \
9099 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9100 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9101 { \
9102 /* register */ \
9103 switch (pVCpu->iem.s.enmEffOpSize) \
9104 { \
9105 case IEMMODE_16BIT: \
9106 IEM_MC_BEGIN(3, 0, 0, 0); \
9107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9108 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9109 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9110 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9111 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9112 IEM_MC_REF_EFLAGS(pEFlags); \
9113 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9114 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9115 IEM_MC_END(); \
9116 break; \
9117 \
9118 case IEMMODE_32BIT: \
9119 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9121 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9122 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9123 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9124 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9125 IEM_MC_REF_EFLAGS(pEFlags); \
9126 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9127 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9128 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9129 IEM_MC_END(); \
9130 break; \
9131 \
9132 case IEMMODE_64BIT: \
9133 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9135 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9136 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9137 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9138 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9139 IEM_MC_REF_EFLAGS(pEFlags); \
9140 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9141 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9142 IEM_MC_END(); \
9143 break; \
9144 \
9145 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9146 } \
9147 } \
9148 else \
9149 { \
9150 /* memory */ \
9151 switch (pVCpu->iem.s.enmEffOpSize) \
9152 { \
9153 case IEMMODE_16BIT: \
9154 IEM_MC_BEGIN(3, 3, 0, 0); \
9155 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9156 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9157 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9159 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9160 \
9161 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9163 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9164 IEM_MC_FETCH_EFLAGS(EFlags); \
9165 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9166 \
9167 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9168 IEM_MC_COMMIT_EFLAGS(EFlags); \
9169 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9170 IEM_MC_END(); \
9171 break; \
9172 \
9173 case IEMMODE_32BIT: \
9174 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
9175 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9176 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9177 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9179 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9180 \
9181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9182 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9183 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9184 IEM_MC_FETCH_EFLAGS(EFlags); \
9185 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9186 \
9187 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9188 IEM_MC_COMMIT_EFLAGS(EFlags); \
9189 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9190 IEM_MC_END(); \
9191 break; \
9192 \
9193 case IEMMODE_64BIT: \
9194 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
9195 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9196 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9197 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9198 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9199 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9200 \
9201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9203 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9204 IEM_MC_FETCH_EFLAGS(EFlags); \
9205 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9206 \
9207 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9208 IEM_MC_COMMIT_EFLAGS(EFlags); \
9209 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9210 IEM_MC_END(); \
9211 break; \
9212 \
9213 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9214 } \
9215 } (void)0
9216
9217 switch (IEM_GET_MODRM_REG_8(bRm))
9218 {
9219 /**
9220 * @opdone
9221 * @opmaps grp2_d1
9222 * @opcode /0
9223 * @opflclass rotate_1
9224 */
9225 case 0:
9226 {
9227 IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
9228 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9229 break;
9230 }
9231 /**
9232 * @opdone
9233 * @opmaps grp2_d1
9234 * @opcode /1
9235 * @opflclass rotate_1
9236 */
9237 case 1:
9238 {
9239 IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
9240 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9241 break;
9242 }
9243 /**
9244 * @opdone
9245 * @opmaps grp2_d1
9246 * @opcode /2
9247 * @opflclass rotate_carry_1
9248 */
9249 case 2:
9250 {
9251 IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9252 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9253 break;
9254 }
9255 /**
9256 * @opdone
9257 * @opmaps grp2_d1
9258 * @opcode /3
9259 * @opflclass rotate_carry_1
9260 */
9261 case 3:
9262 {
9263 IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9264 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9265 break;
9266 }
9267 /**
9268 * @opdone
9269 * @opmaps grp2_d1
9270 * @opcode /4
9271 * @opflclass shift_1
9272 */
9273 case 4:
9274 {
9275 IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9276 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9277 break;
9278 }
9279 /**
9280 * @opdone
9281 * @opmaps grp2_d1
9282 * @opcode /5
9283 * @opflclass shift_1
9284 */
9285 case 5:
9286 {
9287 IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9288 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9289 break;
9290 }
9291 /**
9292 * @opdone
9293 * @opmaps grp2_d1
9294 * @opcode /7
9295 * @opflclass shift_1
9296 */
9297 case 7:
9298 {
9299 IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
9300 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9301 break;
9302 }
9303 /** @opdone */
9304 case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
9305 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9306 }
9307#undef GRP2_BODY_Ev_1
9308}
9309
9310
9311/**
9312 * @opcode 0xd2
 *
 * Group 2 dispatcher: shift/rotate Eb by the count in CL.  ModR/M reg
 * selects the operation (/0 ROL .. /7 SAR, /6 invalid).  Unlike 0xd0 the
 * count is fetched from CL at runtime, so cShiftArg is a plain IEM_MC_ARG
 * loaded via IEM_MC_FETCH_GREG_U8 rather than a constant.
9313 */
9314FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
9315{
9316 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9317
9318 /* Need to use a body macro here since the EFLAGS behaviour differs between
9319 the shifts, rotates and rotate w/ carry. Sigh. */
9320#define GRP2_BODY_Eb_CL(a_pImplExpr) \
9321 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9322 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9323 { \
9324 /* register */ \
9325 IEM_MC_BEGIN(3, 0, 0, 0); \
9326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9327 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9328 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9329 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9330 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9331 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9332 IEM_MC_REF_EFLAGS(pEFlags); \
9333 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9334 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9335 IEM_MC_END(); \
9336 } \
9337 else \
9338 { \
9339 /* memory */ \
9340 IEM_MC_BEGIN(3, 3, 0, 0); \
9341 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9342 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9343 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9345 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9346 \
9347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9348 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9349 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9350 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9351 IEM_MC_FETCH_EFLAGS(EFlags); \
9352 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9353 \
9354 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9355 IEM_MC_COMMIT_EFLAGS(EFlags); \
9356 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9357 IEM_MC_END(); \
9358 } (void)0
9359
9360 switch (IEM_GET_MODRM_REG_8(bRm))
9361 {
9362 /**
9363 * @opdone
9364 * @opmaps grp2_d2
9365 * @opcode /0
9366 * @opflclass rotate_count
9367 */
9368 case 0:
9369 {
9370 IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9371 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9372 break;
9373 }
9374 /**
9375 * @opdone
9376 * @opmaps grp2_d2
9377 * @opcode /1
9378 * @opflclass rotate_count
9379 */
9380 case 1:
9381 {
9382 IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9383 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9384 break;
9385 }
9386 /**
9387 * @opdone
9388 * @opmaps grp2_d2
9389 * @opcode /2
9390 * @opflclass rotate_carry_count
9391 */
9392 case 2:
9393 {
9394 IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9395 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9396 break;
9397 }
9398 /**
9399 * @opdone
9400 * @opmaps grp2_d2
9401 * @opcode /3
9402 * @opflclass rotate_carry_count
9403 */
9404 case 3:
9405 {
9406 IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9407 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9408 break;
9409 }
9410 /**
9411 * @opdone
9412 * @opmaps grp2_d2
9413 * @opcode /4
9414 * @opflclass shift_count
9415 */
9416 case 4:
9417 {
9418 IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9419 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9420 break;
9421 }
9422 /**
9423 * @opdone
9424 * @opmaps grp2_d2
9425 * @opcode /5
9426 * @opflclass shift_count
9427 */
9428 case 5:
9429 {
9430 IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9431 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9432 break;
9433 }
9434 /**
9435 * @opdone
9436 * @opmaps grp2_d2
9437 * @opcode /7
9438 * @opflclass shift_count
9439 */
9440 case 7:
9441 {
9442 IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9443 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9444 break;
9445 }
9446 /** @opdone */
9447 case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
9448 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9449 }
9450#undef GRP2_BODY_Eb_CL
9451}
9452
9453
9454/**
9455 * @opcode 0xd3
 *
 * Group 2 dispatcher: shift/rotate Ev (16/32/64-bit per effective operand
 * size) by the count in CL.  ModR/M reg selects the operation (/0 ROL ..
 * /7 SAR, /6 invalid).  The count is fetched from CL at runtime; the 32-bit
 * register form clears the high dword per the usual AMD64 rule.
9456 */
9457FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
9458{
9459 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9460
9461 /* Need to use a body macro here since the EFLAGS behaviour differs between
9462 the shifts, rotates and rotate w/ carry. Sigh. */
9463#define GRP2_BODY_Ev_CL(a_pImplExpr) \
9464 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9465 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9466 { \
9467 /* register */ \
9468 switch (pVCpu->iem.s.enmEffOpSize) \
9469 { \
9470 case IEMMODE_16BIT: \
9471 IEM_MC_BEGIN(3, 0, 0, 0); \
9472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9473 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9474 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9475 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9476 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9477 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9478 IEM_MC_REF_EFLAGS(pEFlags); \
9479 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9480 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9481 IEM_MC_END(); \
9482 break; \
9483 \
9484 case IEMMODE_32BIT: \
9485 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9486 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9487 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9488 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9489 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9490 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9491 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9492 IEM_MC_REF_EFLAGS(pEFlags); \
9493 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9494 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9495 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9496 IEM_MC_END(); \
9497 break; \
9498 \
9499 case IEMMODE_64BIT: \
9500 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9502 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9503 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9504 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9505 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9506 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9507 IEM_MC_REF_EFLAGS(pEFlags); \
9508 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9509 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9510 IEM_MC_END(); \
9511 break; \
9512 \
9513 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9514 } \
9515 } \
9516 else \
9517 { \
9518 /* memory */ \
9519 switch (pVCpu->iem.s.enmEffOpSize) \
9520 { \
9521 case IEMMODE_16BIT: \
9522 IEM_MC_BEGIN(3, 3, 0, 0); \
9523 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9524 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9525 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9526 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9527 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9528 \
9529 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9530 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9531 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9532 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9533 IEM_MC_FETCH_EFLAGS(EFlags); \
9534 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9535 \
9536 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9537 IEM_MC_COMMIT_EFLAGS(EFlags); \
9538 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9539 IEM_MC_END(); \
9540 break; \
9541 \
9542 case IEMMODE_32BIT: \
9543 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
9544 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9545 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9546 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9548 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9549 \
9550 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9551 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9552 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9553 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9554 IEM_MC_FETCH_EFLAGS(EFlags); \
9555 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9556 \
9557 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9558 IEM_MC_COMMIT_EFLAGS(EFlags); \
9559 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9560 IEM_MC_END(); \
9561 break; \
9562 \
9563 case IEMMODE_64BIT: \
9564 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
9565 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9566 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9567 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9568 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9569 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9570 \
9571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9573 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9574 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9575 IEM_MC_FETCH_EFLAGS(EFlags); \
9576 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9577 \
9578 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9579 IEM_MC_COMMIT_EFLAGS(EFlags); \
9580 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9581 IEM_MC_END(); \
9582 break; \
9583 \
9584 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9585 } \
9586 } (void)0
9587 switch (IEM_GET_MODRM_REG_8(bRm))
9588 {
9589 /**
9590 * @opdone
9591 * @opmaps grp2_d3
9592 * @opcode /0
9593 * @opflclass rotate_count
9594 */
9595 case 0:
9596 {
9597 IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9598 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9599 break;
9600 }
9601 /**
9602 * @opdone
9603 * @opmaps grp2_d3
9604 * @opcode /1
9605 * @opflclass rotate_count
9606 */
9607 case 1:
9608 {
9609 IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9610 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9611 break;
9612 }
9613 /**
9614 * @opdone
9615 * @opmaps grp2_d3
9616 * @opcode /2
9617 * @opflclass rotate_carry_count
9618 */
9619 case 2:
9620 {
9621 IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9622 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9623 break;
9624 }
9625 /**
9626 * @opdone
9627 * @opmaps grp2_d3
9628 * @opcode /3
9629 * @opflclass rotate_carry_count
9630 */
9631 case 3:
9632 {
9633 IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9634 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9635 break;
9636 }
9637 /**
9638 * @opdone
9639 * @opmaps grp2_d3
9640 * @opcode /4
9641 * @opflclass shift_count
9642 */
9643 case 4:
9644 {
9645 IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9646 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9647 break;
9648 }
9649 /**
9650 * @opdone
9651 * @opmaps grp2_d3
9652 * @opcode /5
9653 * @opflclass shift_count
9654 */
9655 case 5:
9656 {
9657 IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9658 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9659 break;
9660 }
9661 /**
9662 * @opdone
9663 * @opmaps grp2_d3
9664 * @opcode /7
9665 * @opflclass shift_count
9666 */
9667 case 7:
9668 {
9669 IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9670 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9671 break;
9672 }
9673 /** @opdone */
9674 case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
9675 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9676 }
9677#undef GRP2_BODY_Ev_CL
9678}
9679
9680
9681/**
9682 * @opcode 0xd4
9683 * @opflmodify cf,pf,af,zf,sf,of
9684 * @opflundef cf,af,of
 *
 * AAM Ib - ASCII adjust AX after multiply; AH := AL / Ib, AL := AL %% Ib.
 * Raises \#DE when the immediate divisor is zero (checked here during
 * decode).  Invalid in 64-bit mode.  Deferred to iemCImpl_aam; only xAX is
 * flagged as modified.
9685 */
9686FNIEMOP_DEF(iemOp_aam_Ib)
9687{
9688/** @todo testcase: aam */
9689 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
9690 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9692 IEMOP_HLP_NO_64BIT();
9693 if (!bImm)
9694 IEMOP_RAISE_DIVIDE_ERROR_RET(); /* Division by zero immediate => #DE. */
9695 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
9696}
9697
9698
9699/**
9700 * @opcode 0xd5
9701 * @opflmodify cf,pf,af,zf,sf,of
9702 * @opflundef cf,af,of
 *
 * AAD Ib - ASCII adjust AX before division; AL := AH * Ib + AL, AH := 0.
 * Unlike AAM there is no divide, so a zero immediate is legal.  Invalid in
 * 64-bit mode.  Deferred to iemCImpl_aad; only xAX is flagged as modified.
9703 */
9704FNIEMOP_DEF(iemOp_aad_Ib)
9705{
9706/** @todo testcase: aad? */
9707 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
9708 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9710 IEMOP_HLP_NO_64BIT();
9711 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
9712}
9713
9714
9715/**
9716 * @opcode 0xd6
 *
 * SALC (undocumented) - set AL from carry: AL := CF ? 0xff : 0x00.
 * Invalid in 64-bit mode.  Implemented inline with a simple EFLAGS.CF
 * branch; no flags are modified.
9717 */
9718FNIEMOP_DEF(iemOp_salc)
9719{
9720 IEMOP_MNEMONIC(salc, "salc");
9721 IEMOP_HLP_NO_64BIT();
9722
9723 IEM_MC_BEGIN(0, 0, 0, 0);
9724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9725 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9726 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
9727 } IEM_MC_ELSE() {
9728 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
9729 } IEM_MC_ENDIF();
9730 IEM_MC_ADVANCE_RIP_AND_FINISH();
9731 IEM_MC_END();
9732}
9733
9734
9735/**
9736 * @opcode 0xd7
 *
 * XLAT - table lookup: AL := [xBX + zero-extended AL] in the effective
 * segment.  One case per effective address size; the 16/32-bit variants use
 * the address-size-limited memory fetch macros (IEM_MC_FETCH_MEM16_U8 /
 * IEM_MC_FETCH_MEM32_U8) so the xBX+AL sum wraps at the address width.
9737 */
9738FNIEMOP_DEF(iemOp_xlat)
9739{
9740 IEMOP_MNEMONIC(xlat, "xlat");
9741 switch (pVCpu->iem.s.enmEffAddrMode)
9742 {
9743 case IEMMODE_16BIT:
9744 IEM_MC_BEGIN(2, 0, 0, 0);
9745 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9746 IEM_MC_LOCAL(uint8_t, u8Tmp);
9747 IEM_MC_LOCAL(uint16_t, u16Addr);
9748 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
9749 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
9750 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
9751 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9752 IEM_MC_ADVANCE_RIP_AND_FINISH();
9753 IEM_MC_END();
9754 break;
9755
9756 case IEMMODE_32BIT:
9757 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
9758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9759 IEM_MC_LOCAL(uint8_t, u8Tmp);
9760 IEM_MC_LOCAL(uint32_t, u32Addr);
9761 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
9762 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
9763 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
9764 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9765 IEM_MC_ADVANCE_RIP_AND_FINISH();
9766 IEM_MC_END();
9767 break;
9768
9769 case IEMMODE_64BIT:
9770 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
9771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9772 IEM_MC_LOCAL(uint8_t, u8Tmp);
9773 IEM_MC_LOCAL(uint64_t, u64Addr);
9774 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
9775 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
9776 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
9777 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9778 IEM_MC_ADVANCE_RIP_AND_FINISH();
9779 IEM_MC_END();
9780 break;
9781
9782 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9783 }
9784}
9785
9786
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0 (e.g. fadd/fmul/fsub/fsubr/fdiv/fdivr st0,stN).
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Only perform the operation when both ST0 and STn hold values;
       otherwise flag stack underflow against ST0 (the destination). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9816
9817
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags (FSW); no register is written (e.g. fcom st0,stN).
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: underflow isn't attributed to any particular register. */
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9847
9848
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags (FSW), and popping the stack when done (e.g. fcomp st0,stN).
 *
 * @param   bRm         Mod R/M byte; the R/M field selects STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Pop even on underflow; UINT8_MAX = no specific destination register. */
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9878
9879
/** Opcode 0xd8 11/0. fadd st0,stN - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. fmul st0,stN - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. fcom st0,stN - compares only, updates FSW. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. fcomp st0,stN - same as fcom but pops the stack. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. fsub st0,stN - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. fsubr st0,stN - reversed operands, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. fdiv st0,stN - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. fdivr st0,stN - reversed operands, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9942
9943
/**
 * Common worker for FPU instructions working on ST0 and an m32r (32-bit real
 * memory operand), and storing the result in ST0.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    /* Effective address must be calculated before decoding is declared done. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9979
9980
/** Opcode 0xd8 !11/0. fadd st0,m32r - add 32-bit real from memory to ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. fmul st0,m32r - multiply ST0 by 32-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
9995
9996
/** Opcode 0xd8 !11/2. fcom st0,m32r - compare ST0 with 32-bit real from
 *  memory; only FSW is updated, nothing is stored. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The _MEM_OP variants also record FPUDP/FPUDS (the data pointer). */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10028
10029
/** Opcode 0xd8 !11/3. fcomp st0,m32r - same as fcom st0,m32r but pops the
 *  stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10061
10062
/** Opcode 0xd8 !11/4. fsub st0,m32r - subtract 32-bit real from ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. fsubr st0,m32r - reversed subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. fdiv st0,m32r - divide ST0 by 32-bit real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. fdivr st0,m32r - reversed divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10093
10094
/**
 * @opcode 0xd8
 *
 * FPU escape opcode 0xd8 dispatcher: register forms (mod=11) operate on
 * ST0/STn, memory forms operate on ST0 and an m32r operand.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) from the low opcode bits and the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10134
10135
/** Opcode 0xd9 /0 mem32real
 * fld m32r - push the 32-bit real memory operand onto the FPU stack
 * (converted to 80-bit real).
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 is the one that becomes the new top after the push; it must
       be empty or we have a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10166
10167
/** Opcode 0xd9 !11/2 mem32real
 * fst m32r - store ST0 to memory as 32-bit real; stack is not popped. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so #PF and friends are raised
       before any FPU state is modified. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 is empty: with IM masked, store a negative QNaN; otherwise
           discard the mapping and let the underflow handler raise things. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10205
10206
/** Opcode 0xd9 !11/3
 * fstp m32r - store ST0 to memory as 32-bit real and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults precede any
       FPU state changes.  Same structure as iemOp_fst_m32r plus the pop. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10244
10245
/** Opcode 0xd9 !11/4
 * fldenv m14/28byte - load the FPU environment from memory; the actual work
 * is deferred to iemCImpl_fldenv (size depends on the operand size). */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10263
10264
10265/** Opcode 0xd9 !11/5 */
10266FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10267{
10268 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10269 IEM_MC_BEGIN(1, 1, 0, 0);
10270 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10271 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10272
10273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10274 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10275 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10276
10277 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10278 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10279
10280 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
10281 IEM_MC_END();
10282}
10283
10284
/** Opcode 0xd9 !11/6
 * fnstenv m14/28byte - store the FPU environment to memory (no-wait form);
 * the actual work is deferred to iemCImpl_fnstenv. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10302
10303
/** Opcode 0xd9 !11/7
 * fnstcw m2byte - store the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10320
10321
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * fnop - FPU no-operation; still updates the FPU opcode/instruction pointer
 * and can raise \#NM / pending FPU exceptions. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10337
10338
/** Opcode 0xd9 11/0 stN
 * fld stN - push a copy of STn onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Source register must hold a value; otherwise it's a push underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10363
10364
/** Opcode 0xd9 11/3 stN
 * fxch stN - exchange the contents of ST0 and STn. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: STn's old value goes to ST0 (via FpuRes, with C1 set),
           ST0's old value is written into STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty register(s): the underflow handling is involved enough to
           warrant a C implementation. */
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10393
10394
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * fstp st0,stN - copy ST0 to STn and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence,
       so special-case destination ST0 as a plain pop. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10441
10442
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators (e.g. fchs, fabs, f2xm1).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10470
10471
/** Opcode 0xd9 0xe0. fchs st0 - change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. fabs st0 - absolute value of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10486
10487
/** Opcode 0xd9 0xe4.
 * ftst st0 - compare ST0 against 0.0; only FSW condition codes are updated. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10511
10512
/** Opcode 0xd9 0xe5.
 * fxam st0 - classify the value in ST0 via the FSW condition codes.  Note
 * that unlike most ST0 operations there is no empty-register check: fxam
 * classifies empty registers too, so the register is referenced
 * unconditionally. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10533
10534
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack
 * (fld1, fldl2t, fldl2e, fldpi, fldlg2, fldln2, fldz).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEM_MC_BEGIN(1, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 becomes the new top after the push; it must be empty or we
       have a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10560
10561
/** Opcode 0xd9 0xe8. fld1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. fldl2t - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. fldl2e - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. fldpi - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. fldlg2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. fldln2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. fldz - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10614
10615
/** Opcode 0xd9 0xf0.
 *
 * f2xm1 st0 - replace ST0 with 2^ST0 - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10629
10630
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * @param   bRm         Mod R/M byte (only the R/M field is used, so callers
 *                      like fyl2x/fpatan pass a literal register index).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Note the operand order: STn is the destination/first operand here. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10661
10662
/** Opcode 0xd9 0xf1.
 * fyl2x - ST1 = ST1 * log2(ST0), then pop; the literal 1 passed as the R/M
 * byte makes the worker target ST1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10669
10670
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack (e.g. fptan, fxtract).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10698
10699
/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    /* ST(0) := tan(ST(0)), then 1.0 is pushed (two-output worker). */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10706
10707
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    /* ST(1) := arctan(ST(1)/ST(0)); worker stores into ST(1) and pops. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10714
10715
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    /* Splits ST(0) into exponent (replaces ST(0)) and significand (pushed). */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10722
10723
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    /* IEEE-754 remainder, result in ST(0), no pop (worker defined above this chunk). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10730
10731
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only the TOP field moves; no register content or tag changes. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode); /* clears C0-C3 */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10752
10753
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Only the TOP field moves; no register content or tag changes. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode); /* clears C0-C3 */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10774
10775
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    /* 8087-style partial remainder, result in ST(0), no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
10782
10783
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    /* ST(1) := ST(1) * log2(ST(0) + 1); worker stores into ST(1) and pops. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
10790
10791
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    /* Unary op on ST(0) in place (worker defined above this chunk). */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
10798
10799
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    /* ST(0) := sin(ST(0)), then cos of the original value is pushed. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
10806
10807
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    /* Rounds ST(0) to integer per FCW rounding control, in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
10814
10815
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    /* ST(0) := ST(0) * 2^trunc(ST(1)), no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
10822
10823
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    /* ST(0) := sin(ST(0)) in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
10830
10831
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    /* ST(0) := cos(ST(0)) in place. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10838
10839
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register-form opcodes 0xe0..0xff (reg fields 4-7),
 * indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
10876
10877
10878/**
10879 * @opcode 0xd9
10880 */
10881FNIEMOP_DEF(iemOp_EscF1)
10882{
10883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10884 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
10885
10886 if (IEM_IS_MODRM_REG_MODE(bRm))
10887 {
10888 switch (IEM_GET_MODRM_REG_8(bRm))
10889 {
10890 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
10891 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
10892 case 2:
10893 if (bRm == 0xd0)
10894 return FNIEMOP_CALL(iemOp_fnop);
10895 IEMOP_RAISE_INVALID_OPCODE_RET();
10896 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
10897 case 4:
10898 case 5:
10899 case 6:
10900 case 7:
10901 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
10902 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
10903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10904 }
10905 }
10906 else
10907 {
10908 switch (IEM_GET_MODRM_REG_8(bRm))
10909 {
10910 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
10911 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
10912 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
10913 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
10914 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
10915 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
10916 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
10917 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
10918 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10919 }
10920 }
10921}
10922
10923
/** Opcode 0xda 11/0.
 * FCMOVB: copy ST(i) to ST(0) when CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be non-empty; only ST(i) is referenced. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10948
10949
/** Opcode 0xda 11/1.
 * FCMOVE: copy ST(i) to ST(0) when ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10974
10975
/** Opcode 0xda 11/2.
 * FCMOVBE: copy ST(i) to ST(0) when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11000
11001
/** Opcode 0xda 11/3.
 * FCMOVU: copy ST(i) to ST(0) when PF is set (unordered result). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11026
11027
11028/**
11029 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11030 * flags, and popping twice when done.
11031 *
11032 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11033 */
11034FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11035{
11036 IEM_MC_BEGIN(3, 1, 0, 0);
11037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11038 IEM_MC_LOCAL(uint16_t, u16Fsw);
11039 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11040 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11041 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11042
11043 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11044 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11045
11046 IEM_MC_PREPARE_FPU_USAGE();
11047 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11048 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11049 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11050 } IEM_MC_ELSE() {
11051 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11052 } IEM_MC_ENDIF();
11053 IEM_MC_ADVANCE_RIP_AND_FINISH();
11054
11055 IEM_MC_END();
11056}
11057
11058
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    /* Unordered compare ST(0) with ST(1), set C0/C2/C3, pop twice. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11065
11066
11067/**
11068 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11069 * the result in ST0.
11070 *
11071 * @param bRm Mod R/M byte.
11072 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11073 */
11074FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11075{
11076 IEM_MC_BEGIN(3, 3, 0, 0);
11077 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11078 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11079 IEM_MC_LOCAL(int32_t, i32Val2);
11080 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11081 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11082 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11083
11084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11085 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11086
11087 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11088 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11089 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11090
11091 IEM_MC_PREPARE_FPU_USAGE();
11092 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11093 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11094 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11095 } IEM_MC_ELSE() {
11096 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11097 } IEM_MC_ENDIF();
11098 IEM_MC_ADVANCE_RIP_AND_FINISH();
11099
11100 IEM_MC_END();
11101}
11102
11103
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    /* ST(0) := ST(0) + (int32 memory operand). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
11110
11111
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    /* ST(0) := ST(0) * (int32 memory operand). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11118
11119
/** Opcode 0xda !11/2.
 * FICOM m32i: compare ST(0) with an int32 memory operand, set C0/C2/C3,
 * no store and no pop. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory-operand variant records FDP/FDS alongside the FSW update. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11151
11152
/** Opcode 0xda !11/3.
 * FICOMP m32i: same as FICOM m32i but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Same comparison AIMPL as FICOM; the pop happens in the FSW update. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11184
11185
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    /* ST(0) := ST(0) - (int32 memory operand). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
11192
11193
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    /* ST(0) := (int32 memory operand) - ST(0) (reversed subtract). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
11200
11201
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    /* ST(0) := ST(0) / (int32 memory operand). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
11208
11209
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    /* ST(0) := (int32 memory operand) / ST(0) (reversed divide). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11216
11217
11218/**
11219 * @opcode 0xda
11220 */
11221FNIEMOP_DEF(iemOp_EscF2)
11222{
11223 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11224 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
11225 if (IEM_IS_MODRM_REG_MODE(bRm))
11226 {
11227 switch (IEM_GET_MODRM_REG_8(bRm))
11228 {
11229 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
11230 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
11231 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
11232 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
11233 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11234 case 5:
11235 if (bRm == 0xe9)
11236 return FNIEMOP_CALL(iemOp_fucompp);
11237 IEMOP_RAISE_INVALID_OPCODE_RET();
11238 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11239 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11241 }
11242 }
11243 else
11244 {
11245 switch (IEM_GET_MODRM_REG_8(bRm))
11246 {
11247 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
11248 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
11249 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
11250 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
11251 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
11252 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
11253 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
11254 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
11255 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11256 }
11257 }
11258}
11259
11260
/** Opcode 0xdb !11/0.
 * FILD m32i: convert an int32 memory operand to 80-bit real and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register below TOP, i.e. ST(7), to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11291
11292
/** Opcode 0xdb !11/1.
 * FISTTP m32i (SSE3): store ST(0) to memory as int32 with truncation
 * (chop rounding regardless of FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before looking at the stack so memory
       faults are raised first. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int32_t *,               pi32Dst,           1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,         2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw,   0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW the AIMPL produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): with IM masked write the integer indefinite value,
           otherwise roll the mapping back and raise via the underflow path. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11330
11331
/** Opcode 0xdb !11/2.
 * FIST m32i: store ST(0) to memory as int32 using FCW rounding, no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before looking at the stack so memory
       faults are raised first. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int32_t *,               pi32Dst,           1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,         2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw,   0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): write integer indefinite when IM is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11369
11370
/** Opcode 0xdb !11/3.
 * FISTP m32i: same as FIST m32i but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before looking at the stack so memory
       faults are raised first. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int32_t *,               pi32Dst,           1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,         2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw,   0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): write integer indefinite when IM is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11408
11409
/** Opcode 0xdb !11/5.
 * FLD m80r: load an 80-bit real memory operand and push it onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register below TOP, i.e. ST(7), to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11440
11441
/** Opcode 0xdb !11/7.
 * FSTP m80r: store ST(0) to memory as 80-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before looking at the stack so memory
       faults are raised first. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,           1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,         2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw,   0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): write negative QNaN (real indefinite) when IM is masked. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11479
11480
/** Opcode 0xdb 11/0.
 * FCMOVNB: copy ST(i) to ST(0) when CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11505
11506
/** Opcode 0xdb 11/1.
 * FCMOVNE: copy ST(i) to ST(0) when ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11531
11532
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copy ST(i) to ST(0) when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11557
11558
11559/** Opcode 0xdb 11/3. */
11560FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
11561{
11562 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
11563 IEM_MC_BEGIN(0, 1, 0, 0);
11564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11565 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11566
11567 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11568 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11569
11570 IEM_MC_PREPARE_FPU_USAGE();
11571 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11572 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
11573 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11574 } IEM_MC_ENDIF();
11575 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11576 } IEM_MC_ELSE() {
11577 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11578 } IEM_MC_ENDIF();
11579 IEM_MC_ADVANCE_RIP_AND_FINISH();
11580
11581 IEM_MC_END();
11582}
11583
11584
/** Opcode 0xdb 0xe0.
 * FNENI: 8087 interrupt-enable; a no-op (ignored) on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11595
11596
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087 interrupt-disable; a no-op (ignored) on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11607
11608
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear FSW exception flags without checking pending exceptions. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11621
11622
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU; deferred to iemCImpl_finit with
 * fCheckXcpts=false (no-wait form, pending exceptions not checked). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11630
11631
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode on FPU"; ignored (no-op) here, matching
 * later CPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11642
11643
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "reset protected mode"; raises \#UD here since newer CPUs
 * do so (the no-op emulation is kept under \#if 0 for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11659
11660
/** Opcode 0xdb 11/5.
 * FUCOMI - unordered compare of ST(0) with ST(i), setting EFLAGS.  Deferred to
 * iemCImpl_fcomi_fucomi; the last argument packs fPop (here 0, no pop) with
 * the FPU opcode word. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11669
11670
11671/** Opcode 0xdb 11/6. */
11672FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11673{
11674 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11675 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11676 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11677 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11678}
11679
11680
11681/**
11682 * @opcode 0xdb
11683 */
11684FNIEMOP_DEF(iemOp_EscF3)
11685{
11686 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11687 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
11688 if (IEM_IS_MODRM_REG_MODE(bRm))
11689 {
11690 switch (IEM_GET_MODRM_REG_8(bRm))
11691 {
11692 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
11693 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
11694 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
11695 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
11696 case 4:
11697 switch (bRm)
11698 {
11699 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
11700 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
11701 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
11702 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
11703 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
11704 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
11705 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
11706 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
11707 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11708 }
11709 break;
11710 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
11711 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
11712 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11713 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11714 }
11715 }
11716 else
11717 {
11718 switch (IEM_GET_MODRM_REG_8(bRm))
11719 {
11720 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
11721 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
11722 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
11723 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
11724 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11725 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
11726 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11727 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
11728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11729 }
11730 }
11731}
11732
11733
11734/**
11735 * Common worker for FPU instructions working on STn and ST0, and storing the
11736 * result in STn unless IE, DE or ZE was raised.
11737 *
11738 * @param bRm Mod R/M byte.
11739 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11740 */
11741FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11742{
11743 IEM_MC_BEGIN(3, 1, 0, 0);
11744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11745 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11746 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11747 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11748 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11749
11750 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11751 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11752
11753 IEM_MC_PREPARE_FPU_USAGE();
11754 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11755 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11756 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11757 } IEM_MC_ELSE() {
11758 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11759 } IEM_MC_ENDIF();
11760 IEM_MC_ADVANCE_RIP_AND_FINISH();
11761
11762 IEM_MC_END();
11763}
11764
11765
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - thin wrapper around the common stN,st0 worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - note the /4 vs /5 reg encoding is swapped relative to
 * the 0xd8 (st0,stN) forms. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11812
11813
11814/**
11815 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
11816 * memory operand, and storing the result in ST0.
11817 *
11818 * @param bRm Mod R/M byte.
11819 * @param pfnImpl Pointer to the instruction implementation (assembly).
11820 */
11821FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
11822{
11823 IEM_MC_BEGIN(3, 3, 0, 0);
11824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11825 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11826 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
11827 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11828 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
11829 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
11830
11831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11833 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11834 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11835
11836 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11837 IEM_MC_PREPARE_FPU_USAGE();
11838 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
11839 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
11840 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11841 } IEM_MC_ELSE() {
11842 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11843 } IEM_MC_ENDIF();
11844 IEM_MC_ADVANCE_RIP_AND_FINISH();
11845
11846 IEM_MC_END();
11847}
11848
11849
/** Opcode 0xdc !11/0.
 * FADD ST(0),m64r - wrapper around the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1.
 * FMUL ST(0),m64r. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11864
11865
/** Opcode 0xdc !11/2.
 * FCOM ST(0),m64r - compare ST(0) against a 64-bit float in memory; only the
 * FSW is updated (no stack store, no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* UINT8_MAX: no stack register is the destination of the underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11897
11898
/** Opcode 0xdc !11/3.
 * FCOMP ST(0),m64r - same as FCOM m64r but pops the register stack afterwards
 * (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11930
11931
/** Opcode 0xdc !11/4.
 * FSUB ST(0),m64r - wrapper around the common ST0/m64r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5.
 * FSUBR ST(0),m64r. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6.
 * FDIV ST(0),m64r. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7.
 * FDIVR ST(0),m64r. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11962
11963
11964/**
11965 * @opcode 0xdc
11966 */
11967FNIEMOP_DEF(iemOp_EscF4)
11968{
11969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11970 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
11971 if (IEM_IS_MODRM_REG_MODE(bRm))
11972 {
11973 switch (IEM_GET_MODRM_REG_8(bRm))
11974 {
11975 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
11976 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
11977 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
11978 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
11979 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
11980 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
11981 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
11982 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
11983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11984 }
11985 }
11986 else
11987 {
11988 switch (IEM_GET_MODRM_REG_8(bRm))
11989 {
11990 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
11991 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
11992 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
11993 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
11994 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
11995 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
11996 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
11997 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
11998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11999 }
12000 }
12001}
12002
12003
/** Opcode 0xdd !11/0.
 * FLD m64r - push a 64-bit real from memory onto the FPU stack (converted to
 * 80-bit).  The target register ST(7)-to-be must be empty or a stack push
 * overflow is signalled.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 relative to the current top is where the push will land. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12034
12035
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST(0) to memory as a 64-bit integer with truncation,
 * then pop.  The destination is mapped for write before the FPU register is
 * examined so an inaccessible destination faults first. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): if the invalid-operation exception is masked, store the
           integer indefinite value; otherwise roll the mapping back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12073
12074
/** Opcode 0xdd !11/2.
 * FST m64r - store ST(0) to memory as a 64-bit real (no pop).  On an empty
 * ST(0) with IM masked, a negative QNaN (real indefinite) is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12112
12113
12114
12115
/** Opcode 0xdd !11/3.
 * FSTP m64r - same as FST m64r but pops the register stack afterwards. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): store real indefinite (QNaN) if IM is masked, else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12153
12154
/** Opcode 0xdd !11/4.
 * FRSTOR - restore the full FPU state from a 94/108-byte memory image;
 * deferred to iemCImpl_frstor (operand size selects the 16/32-bit layout). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12172
12173
/** Opcode 0xdd !11/6.
 * FNSAVE - save the full FPU state to a 94/108-byte memory image and then
 * reinitialize the FPU; deferred to iemCImpl_fnsave. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12191
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to a 16-bit memory location without
 * checking for pending exceptions first. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12215
12216
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the given stack register as empty and update FOP/FIP. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12236
12237
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) into ST(i).  Underflows against ST(i) when ST(0)
 * is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap ST(0) in a result with zero FSW bits and store it in ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12260
12261
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i) - unordered compare, FSW only (no store, no pop). */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i) - unordered compare, then pop. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12276
12277
12278/**
12279 * @opcode 0xdd
12280 */
12281FNIEMOP_DEF(iemOp_EscF5)
12282{
12283 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12284 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
12285 if (IEM_IS_MODRM_REG_MODE(bRm))
12286 {
12287 switch (IEM_GET_MODRM_REG_8(bRm))
12288 {
12289 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
12290 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
12291 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
12292 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
12293 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
12294 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
12295 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
12296 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12297 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12298 }
12299 }
12300 else
12301 {
12302 switch (IEM_GET_MODRM_REG_8(bRm))
12303 {
12304 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
12305 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
12306 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
12307 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
12308 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
12309 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
12310 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
12311 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
12312 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12313 }
12314 }
12315}
12316
12317
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add and pop; wrapper around the stN,st0-with-pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply and pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12332
12333
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
12340
12341
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract and pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0) - reverse divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0) - divide and pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12372
12373
12374/**
12375 * Common worker for FPU instructions working on ST0 and an m16i, and storing
12376 * the result in ST0.
12377 *
12378 * @param bRm Mod R/M byte.
12379 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12380 */
12381FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
12382{
12383 IEM_MC_BEGIN(3, 3, 0, 0);
12384 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12385 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12386 IEM_MC_LOCAL(int16_t, i16Val2);
12387 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12388 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12389 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
12390
12391 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12392 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12393
12394 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12395 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12396 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12397
12398 IEM_MC_PREPARE_FPU_USAGE();
12399 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12400 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
12401 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
12402 } IEM_MC_ELSE() {
12403 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12404 } IEM_MC_ENDIF();
12405 IEM_MC_ADVANCE_RIP_AND_FINISH();
12406
12407 IEM_MC_END();
12408}
12409
12410
/** Opcode 0xde !11/0.
 * FIADD m16i - wrapper around the common ST0/m16i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1.
 * FIMUL m16i. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12425
12426
/** Opcode 0xde !11/2.
 * FICOM ST(0),m16i - compare ST(0) with a 16-bit integer in memory;
 * only the FSW is updated (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12458
12459
/** Opcode 0xde !11/3.
 * FICOMP ST(0),m16i - same as FICOM m16i but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12491
12492
12493/** Opcode 0xde !11/4. */
12494FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
12495{
12496 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
12497 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
12498}
12499
12500
12501/** Opcode 0xde !11/5. */
12502FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
12503{
12504 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
12505 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
12506}
12507
12508
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* ST(0) = ST(0) / m16i via the shared st0-op-m16i template. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12515
12516
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* Reversed divide (per worker name); same shared template as fisub/fidiv. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12523
12524
/**
 * @opcode 0xde
 *
 * Escape opcode 0xde dispatcher: register forms (mod==3) are the *p
 * arithmetic/compare instructions, memory forms are the 16-bit integer
 * operand group.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9) /* /3 is only valid as FCOMPP (0xde 0xd9). */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12565
12566
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Mark ST(i) empty, then increment TOP -- i.e. ffree followed by the pop
       effect of fincstp, matching the comment above. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12586
12587
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    /* Copy the FPU status word into AX; no-wait form, so no pending-exception check. */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12602
12603
12604/** Opcode 0xdf 11/5. */
12605FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12606{
12607 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12608 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12609 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12610 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12611}
12612
12613
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    /* Ordered compare ST(0) with ST(i), set EFLAGS, then pop (bit 31 = fPop).
       fUCmp=false selects the ordered compare that raises #IA on any NaN. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12622
12623
/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");
    /* Load a signed 16-bit integer from memory, convert to R80 and push it. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is the slot the push lands in; it must be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12654
12655
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    /* Store ST(0) to a 16-bit integer using the truncating worker
       (iemAImpl_fistt_*), then pop the register stack. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only before looking at the stack so a #PF is
       raised before any FPU state is modified. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: with IM masked store the integer indefinite,
           otherwise roll the mapping back so memory stays untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12693
12694
/** Opcode 0xdf !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    /* Store ST(0) as a 16-bit integer (rounding per FCW.RC in the worker);
       does NOT pop -- contrast iemOp_fistp_m16i below. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: masked IM writes the integer indefinite, unmasked
           rolls back the mapping so memory is untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12732
12733
/** Opcode 0xdf !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    /* Same as iemOp_fist_m16i but pops the register stack afterwards
       (the *_THEN_POP FSW update variants). */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: masked IM writes the integer indefinite, unmasked
           rolls back the mapping; either way the underflow helper pops. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12771
12772
/** Opcode 0xdf !11/4. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");
    /* Load an 80-bit packed BCD value (RTPBCD80U), convert to R80 and push. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Slot 7 relative to TOP is where the push lands; otherwise overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12803
12804
/** Opcode 0xdf !11/5. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");
    /* Load a signed 64-bit integer from memory, convert to R80 and push it. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Slot 7 relative to TOP must be empty for the push; otherwise overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12835
12836
/** Opcode 0xdf !11/6. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    /* Store ST(0) as an 80-bit packed BCD value and pop the register stack. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: masked IM writes the BCD indefinite encoding,
           unmasked rolls back the mapping; the underflow helper pops. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12874
12875
/** Opcode 0xdf !11/7. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    /* Store ST(0) as a signed 64-bit integer (rounding per FCW.RC in the
       worker) and pop the register stack. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: masked IM writes the 64-bit integer indefinite,
           unmasked rolls back the mapping; the underflow helper pops. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12913
12914
/**
 * @opcode 0xdf
 *
 * Escape opcode 0xdf dispatcher: register forms include the undocumented
 * FFREEP, FNSTSW AX and the compare-and-pop instructions; memory forms are
 * the 16-bit integer, 64-bit integer and packed BCD group.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* /4 is only valid as FNSTSW AX (0xdf 0xe0). */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12955
12956
/**
 * @opcode 0xe0
 * @opfltest zf
 *
 * LOOPNE/LOOPNZ: decrement xCX (width = effective address size) and branch
 * when the counter did not start at one and ZF is clear.  Note the counter is
 * decremented on both paths; only the RIP handling differs.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13011
13012
/**
 * @opcode 0xe1
 * @opfltest zf
 *
 * LOOPE/LOOPZ: identical structure to iemOp_loopne_Jb except the branch
 * additionally requires ZF to be set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13067
13068
/**
 * @opcode 0xe2
 *
 * LOOP: decrement xCX and branch while it is non-zero.  Includes a
 * logging-only shortcut that zeroes the counter for tight self-loops
 * (LOOP $-2) used by some guests as a CPU stall.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    /* Only when verbose logging is on and the target is the instruction itself:
       set the counter to zero and fall through, instead of iterating. */
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                /* Counter was one: final iteration, store zero and fall through. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13162
13163
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ: branch when the counter register (width = effective
 * address size) is zero; the counter is not modified.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13211
13212
/**
 * @opcode 0xe4
 * @opfltest iopl
 *
 * IN AL,Ib: byte input from the immediate port into AL; deferred to the
 * C implementation (I/O permission checks, VM-exits).
 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* 0x80 marks the immediate-port form for the common iemCImpl_in worker. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13225
13226
/**
 * @opcode 0xe5
 * @opfltest iopl
 *
 * IN eAX,Ib: word/dword input (per effective operand size) from the
 * immediate port into AX/EAX.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13240
13241
/**
 * @opcode 0xe6
 * @opfltest iopl
 *
 * OUT Ib,AL: byte output of AL to the immediate port.
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13254
13255
/**
 * @opcode 0xe7
 * @opfltest iopl
 *
 * OUT Ib,eAX: word/dword output (per effective operand size) of AX/EAX to
 * the immediate port.
 */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13269
13270
/**
 * @opcode 0xe8
 *
 * CALL rel16/rel32 (near, relative): the immediate is sign-extended and the
 * push/branch is done by the operand-size specific C implementation.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* 64-bit mode still uses a 32-bit displacement, sign-extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13304
13305
/**
 * @opcode 0xe9
 *
 * JMP rel16/rel32 (near, relative).  The 64-bit case shares the 32-bit
 * decode path: a sign-extended 32-bit displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13335
13336
/**
 * @opcode 0xea
 *
 * JMP ptr16:16/ptr16:32 (far, direct).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
13358
13359
/**
 * @opcode 0xeb
 *
 * JMP rel8 (near, relative, short form).
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
13374
13375
/**
 * @opcode 0xec
 * @opfltest iopl
 *
 * IN AL,DX: byte input from the port in DX into AL.
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13388
13389
/**
 * @opcode 0xed
 * @opfltest iopl
 *
 * IN eAX,DX: word/dword input (per effective operand size) from the port
 * in DX into AX/EAX.
 */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13403
13404
/**
 * @opcode 0xee
 * @opfltest iopl
 *
 * OUT DX,AL: byte output of AL to the port in DX.
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13416
13417
/**
 * @opcode 0xef
 * @opfltest iopl
 *
 * OUT DX,eAX: word/dword output (per effective operand size) of AX/EAX to
 * the port in DX.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13430
13431
/**
 * @opcode 0xf0
 *
 * LOCK prefix: record the prefix flag and recurse into the one-byte map for
 * the following opcode byte.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13443
13444
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises \#DB via the common interrupt C implementation.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
13460
13461
13462/**
13463 * @opcode 0xf2
13464 */
13465FNIEMOP_DEF(iemOp_repne)
13466{
13467 /* This overrides any previous REPE prefix. */
13468 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
13469 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
13470 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
13471
13472 /* For the 4 entry opcode tables, REPNZ overrides any previous
13473 REPZ and operand size prefixes. */
13474 pVCpu->iem.s.idxPrefix = 3;
13475
13476 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13477 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13478}
13479
13480
13481/**
13482 * @opcode 0xf3
13483 */
13484FNIEMOP_DEF(iemOp_repe)
13485{
13486 /* This overrides any previous REPNE prefix. */
13487 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
13488 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
13489 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
13490
13491 /* For the 4 entry opcode tables, REPNZ overrides any previous
13492 REPNZ and operand size prefixes. */
13493 pVCpu->iem.s.idxPrefix = 2;
13494
13495 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13496 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13497}
13498
13499
13500/**
13501 * @opcode 0xf4
13502 */
13503FNIEMOP_DEF(iemOp_hlt)
13504{
13505 IEMOP_MNEMONIC(hlt, "hlt");
13506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13507 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
13508}
13509
13510
13511/**
13512 * @opcode 0xf5
13513 * @opflmodify cf
13514 */
13515FNIEMOP_DEF(iemOp_cmc)
13516{
13517 IEMOP_MNEMONIC(cmc, "cmc");
13518 IEM_MC_BEGIN(0, 0, 0, 0);
13519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13520 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
13521 IEM_MC_ADVANCE_RIP_AND_FINISH();
13522 IEM_MC_END();
13523}
13524
13525
13526/**
13527 * Body for of 'inc/dec/not/neg Eb'.
13528 */
13529#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
13530 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
13531 { \
13532 /* register access */ \
13533 IEM_MC_BEGIN(2, 0, 0, 0); \
13534 IEMOP_HLP_DONE_DECODING(); \
13535 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13536 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13537 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
13538 IEM_MC_REF_EFLAGS(pEFlags); \
13539 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13540 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13541 IEM_MC_END(); \
13542 } \
13543 else \
13544 { \
13545 /* memory access. */ \
13546 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13547 { \
13548 IEM_MC_BEGIN(2, 2, 0, 0); \
13549 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13550 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13551 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13552 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13553 \
13554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13555 IEMOP_HLP_DONE_DECODING(); \
13556 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13557 IEM_MC_FETCH_EFLAGS(EFlags); \
13558 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13559 \
13560 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13561 IEM_MC_COMMIT_EFLAGS(EFlags); \
13562 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13563 IEM_MC_END(); \
13564 } \
13565 else \
13566 { \
13567 IEM_MC_BEGIN(2, 2, 0, 0); \
13568 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13569 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13571 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13572 \
13573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13574 IEMOP_HLP_DONE_DECODING(); \
13575 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13576 IEM_MC_FETCH_EFLAGS(EFlags); \
13577 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
13578 \
13579 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13580 IEM_MC_COMMIT_EFLAGS(EFlags); \
13581 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13582 IEM_MC_END(); \
13583 } \
13584 } \
13585 (void)0
13586
13587
13588/**
13589 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
13590 */
13591#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
13592 if (IEM_IS_MODRM_REG_MODE(bRm)) \
13593 { \
13594 /* \
13595 * Register target \
13596 */ \
13597 switch (pVCpu->iem.s.enmEffOpSize) \
13598 { \
13599 case IEMMODE_16BIT: \
13600 IEM_MC_BEGIN(2, 0, 0, 0); \
13601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13602 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13603 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13604 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13605 IEM_MC_REF_EFLAGS(pEFlags); \
13606 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13607 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13608 IEM_MC_END(); \
13609 break; \
13610 \
13611 case IEMMODE_32BIT: \
13612 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13614 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13615 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13616 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13617 IEM_MC_REF_EFLAGS(pEFlags); \
13618 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13619 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13620 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13621 IEM_MC_END(); \
13622 break; \
13623 \
13624 case IEMMODE_64BIT: \
13625 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13627 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13628 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13629 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13630 IEM_MC_REF_EFLAGS(pEFlags); \
13631 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13632 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13633 IEM_MC_END(); \
13634 break; \
13635 \
13636 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13637 } \
13638 } \
13639 else \
13640 { \
13641 /* \
13642 * Memory target. \
13643 */ \
13644 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13645 { \
13646 switch (pVCpu->iem.s.enmEffOpSize) \
13647 { \
13648 case IEMMODE_16BIT: \
13649 IEM_MC_BEGIN(2, 3, 0, 0); \
13650 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13651 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13652 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13653 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13654 \
13655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13656 IEMOP_HLP_DONE_DECODING(); \
13657 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13658 IEM_MC_FETCH_EFLAGS(EFlags); \
13659 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13660 \
13661 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13662 IEM_MC_COMMIT_EFLAGS(EFlags); \
13663 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13664 IEM_MC_END(); \
13665 break; \
13666 \
13667 case IEMMODE_32BIT: \
13668 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13669 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13670 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13671 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13672 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13673 \
13674 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13675 IEMOP_HLP_DONE_DECODING(); \
13676 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13677 IEM_MC_FETCH_EFLAGS(EFlags); \
13678 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13679 \
13680 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13681 IEM_MC_COMMIT_EFLAGS(EFlags); \
13682 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13683 IEM_MC_END(); \
13684 break; \
13685 \
13686 case IEMMODE_64BIT: \
13687 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13688 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13689 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13690 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13691 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13692 \
13693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13694 IEMOP_HLP_DONE_DECODING(); \
13695 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13696 IEM_MC_FETCH_EFLAGS(EFlags); \
13697 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13698 \
13699 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13700 IEM_MC_COMMIT_EFLAGS(EFlags); \
13701 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13702 IEM_MC_END(); \
13703 break; \
13704 \
13705 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13706 } \
13707 } \
13708 else \
13709 { \
13710 (void)0
13711
/**
 * Locked-memory tail for IEMOP_BODY_UNARY_Ev.
 *
 * Supplies the atomic memory path for all three operand sizes and closes the
 * braces left open by IEMOP_BODY_UNARY_Ev; must follow it directly.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13777
13778
13779/**
13780 * @opmaps grp3_f6
13781 * @opcode /0
13782 * @opflclass logical
13783 * @todo also /1
13784 */
13785FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13786{
13787 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
13788 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13789
13790 if (IEM_IS_MODRM_REG_MODE(bRm))
13791 {
13792 /* register access */
13793 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13794 IEM_MC_BEGIN(3, 0, 0, 0);
13795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13796 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13797 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13798 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13799 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13800 IEM_MC_REF_EFLAGS(pEFlags);
13801 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13802 IEM_MC_ADVANCE_RIP_AND_FINISH();
13803 IEM_MC_END();
13804 }
13805 else
13806 {
13807 /* memory access. */
13808 IEM_MC_BEGIN(3, 3, 0, 0);
13809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13810 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13811
13812 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13814
13815 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13816 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
13817 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13818
13819 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
13820 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13821 IEM_MC_FETCH_EFLAGS(EFlags);
13822 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13823
13824 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
13825 IEM_MC_COMMIT_EFLAGS(EFlags);
13826 IEM_MC_ADVANCE_RIP_AND_FINISH();
13827 IEM_MC_END();
13828 }
13829}
13830
13831
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized mul/imul/div/idiv group 3 encodings.  The
 * worker (pfnU8) operates on AX and returns non-zero on error, in which case
 * a divide error (\#DE) is raised. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *,      pu16AX,     0);
        IEM_MC_ARG(uint8_t,         u8Value,    1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
        IEM_MC_LOCAL(int32_t,       rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13882
13883
/** Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the word/dword/qword mul/imul/div/idiv group 3 encodings.
 * The size-specific worker from pImpl operates on rAX:rDX and returns
 * non-zero on error, in which case a divide error (\#DE) is raised. */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero bits 63:32 of both output regs. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *,      pu16AX,     0);
                IEM_MC_ARG(uint16_t *,      pu16DX,     1);
                IEM_MC_ARG(uint16_t,        u16Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,      pu32AX,     0);
                IEM_MC_ARG(uint32_t *,      pu32DX,     1);
                IEM_MC_ARG(uint32_t,        u32Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit register writes zero bits 63:32 of both output regs. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,      pu64AX,     0);
                IEM_MC_ARG(uint64_t *,      pu64DX,     1);
                IEM_MC_ARG(uint64_t,        u64Value,   2);
                IEM_MC_ARG(uint32_t *,      pEFlags,    3);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
                IEM_MC_LOCAL(int32_t,       rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14054
14055
14056/**
14057 * @opmaps grp3_f6
14058 * @opcode /2
14059 * @opflclass unchanged
14060 */
14061FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14062{
14063/** @todo does not modify EFLAGS. */
14064 IEMOP_MNEMONIC(not_Eb, "not Eb");
14065 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14066}
14067
14068
14069/**
14070 * @opmaps grp3_f6
14071 * @opcode /3
14072 * @opflclass arithmetic
14073 */
14074FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14075{
14076 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14077 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14078}
14079
14080
14081/**
14082 * @opcode 0xf6
14083 */
14084FNIEMOP_DEF(iemOp_Grp3_Eb)
14085{
14086 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14087 switch (IEM_GET_MODRM_REG_8(bRm))
14088 {
14089 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14090 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14091 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14092 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14093 case 4:
14094 /**
14095 * @opdone
14096 * @opmaps grp3_f6
14097 * @opcode /4
14098 * @opflclass multiply
14099 */
14100 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14101 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14102 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14103 case 5:
14104 /**
14105 * @opdone
14106 * @opmaps grp3_f6
14107 * @opcode /5
14108 * @opflclass multiply
14109 */
14110 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14111 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14112 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14113 case 6:
14114 /**
14115 * @opdone
14116 * @opmaps grp3_f6
14117 * @opcode /6
14118 * @opflclass division
14119 */
14120 IEMOP_MNEMONIC(div_Eb, "div Eb");
14121 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14122 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14123 case 7:
14124 /**
14125 * @opdone
14126 * @opmaps grp3_f6
14127 * @opcode /7
14128 * @opflclass division
14129 */
14130 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14131 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14132 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14134 }
14135}
14136
14137
14138/**
14139 * @opmaps grp3_f7
14140 * @opcode /0
14141 * @opflclass logical
14142 */
14143FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
14144{
14145 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
14146 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14147
14148 if (IEM_IS_MODRM_REG_MODE(bRm))
14149 {
14150 /* register access */
14151 switch (pVCpu->iem.s.enmEffOpSize)
14152 {
14153 case IEMMODE_16BIT:
14154 IEM_MC_BEGIN(3, 0, 0, 0);
14155 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14157 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14158 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
14159 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14160 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14161 IEM_MC_REF_EFLAGS(pEFlags);
14162 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14163 IEM_MC_ADVANCE_RIP_AND_FINISH();
14164 IEM_MC_END();
14165 break;
14166
14167 case IEMMODE_32BIT:
14168 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
14169 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14171 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14172 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
14173 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14174 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14175 IEM_MC_REF_EFLAGS(pEFlags);
14176 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14177 /* No clearing the high dword here - test doesn't write back the result. */
14178 IEM_MC_ADVANCE_RIP_AND_FINISH();
14179 IEM_MC_END();
14180 break;
14181
14182 case IEMMODE_64BIT:
14183 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
14184 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14186 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14187 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
14188 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14189 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14190 IEM_MC_REF_EFLAGS(pEFlags);
14191 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14192 IEM_MC_ADVANCE_RIP_AND_FINISH();
14193 IEM_MC_END();
14194 break;
14195
14196 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14197 }
14198 }
14199 else
14200 {
14201 /* memory access. */
14202 switch (pVCpu->iem.s.enmEffOpSize)
14203 {
14204 case IEMMODE_16BIT:
14205 IEM_MC_BEGIN(3, 3, 0, 0);
14206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
14208
14209 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14210 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14211
14212 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14213 IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
14214 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14215
14216 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
14217 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14218 IEM_MC_FETCH_EFLAGS(EFlags);
14219 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14220
14221 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14222 IEM_MC_COMMIT_EFLAGS(EFlags);
14223 IEM_MC_ADVANCE_RIP_AND_FINISH();
14224 IEM_MC_END();
14225 break;
14226
14227 case IEMMODE_32BIT:
14228 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
14229 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14231
14232 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14233 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14234
14235 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14236 IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
14237 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14238
14239 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
14240 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14241 IEM_MC_FETCH_EFLAGS(EFlags);
14242 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14243
14244 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14245 IEM_MC_COMMIT_EFLAGS(EFlags);
14246 IEM_MC_ADVANCE_RIP_AND_FINISH();
14247 IEM_MC_END();
14248 break;
14249
14250 case IEMMODE_64BIT:
14251 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
14252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14254
14255 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14257
14258 IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
14259 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14260 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14261
14262 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
14263 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14264 IEM_MC_FETCH_EFLAGS(EFlags);
14265 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14266
14267 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14268 IEM_MC_COMMIT_EFLAGS(EFlags);
14269 IEM_MC_ADVANCE_RIP_AND_FINISH();
14270 IEM_MC_END();
14271 break;
14272
14273 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14274 }
14275 }
14276}
14277
14278
14279/**
14280 * @opmaps grp3_f7
14281 * @opcode /2
14282 * @opflclass unchanged
14283 */
14284FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
14285{
14286/** @todo does not modify EFLAGS */
14287 IEMOP_MNEMONIC(not_Ev, "not Ev");
14288 IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
14289 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
14290}
14291
14292
14293/**
14294 * @opmaps grp3_f7
14295 * @opcode /3
14296 * @opflclass arithmetic
14297 */
14298FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
14299{
14300 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
14301 IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
14302 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
14303}
14304
14305
14306/**
14307 * @opcode 0xf7
14308 */
14309FNIEMOP_DEF(iemOp_Grp3_Ev)
14310{
14311 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14312 switch (IEM_GET_MODRM_REG_8(bRm))
14313 {
14314 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14315 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14316 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14317 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14318 case 4:
14319 /**
14320 * @opdone
14321 * @opmaps grp3_f7
14322 * @opcode /4
14323 * @opflclass multiply
14324 */
14325 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
14326 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14327 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
14328 case 5:
14329 /**
14330 * @opdone
14331 * @opmaps grp3_f7
14332 * @opcode /5
14333 * @opflclass multiply
14334 */
14335 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
14336 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14337 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
14338 case 6:
14339 /**
14340 * @opdone
14341 * @opmaps grp3_f7
14342 * @opcode /6
14343 * @opflclass division
14344 */
14345 IEMOP_MNEMONIC(div_Ev, "div Ev");
14346 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14347 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
14348 case 7:
14349 /**
14350 * @opdone
14351 * @opmaps grp3_f7
14352 * @opcode /7
14353 * @opflclass division
14354 */
14355 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
14356 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14357 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
14358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14359 }
14360}
14361
14362
14363/**
14364 * @opcode 0xf8
14365 * @opflmodify cf
14366 * @opflclear cf
14367 */
14368FNIEMOP_DEF(iemOp_clc)
14369{
14370 IEMOP_MNEMONIC(clc, "clc");
14371 IEM_MC_BEGIN(0, 0, 0, 0);
14372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14373 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
14374 IEM_MC_ADVANCE_RIP_AND_FINISH();
14375 IEM_MC_END();
14376}
14377
14378
14379/**
14380 * @opcode 0xf9
14381 * @opflmodify cf
14382 * @opflset cf
14383 */
14384FNIEMOP_DEF(iemOp_stc)
14385{
14386 IEMOP_MNEMONIC(stc, "stc");
14387 IEM_MC_BEGIN(0, 0, 0, 0);
14388 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14389 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
14390 IEM_MC_ADVANCE_RIP_AND_FINISH();
14391 IEM_MC_END();
14392}
14393
14394
14395/**
14396 * @opcode 0xfa
14397 * @opfltest iopl,vm
14398 * @opflmodify if,vif
14399 */
14400FNIEMOP_DEF(iemOp_cli)
14401{
14402 IEMOP_MNEMONIC(cli, "cli");
14403 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14404 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
14405}
14406
14407
14408/**
14409 * @opcode 0xfb
14410 * @opfltest iopl,vm
14411 * @opflmodify if,vif
14412 */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI - set the interrupt flag; the C implementation handles privilege
       checks, interrupt shadowing and checking for pending IRQs afterwards. */
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
14420
14421
14422/**
14423 * @opcode 0xfc
14424 * @opflmodify df
14425 * @opflclear df
14426 */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag. */
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is invalid here. */
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14436
14437
14438/**
14439 * @opcode 0xfd
14440 * @opflmodify df
14441 * @opflset df
14442 */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag. */
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK prefix is invalid here. */
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
14452
14453
14454/**
14455 * @opmaps grp4
14456 * @opcode /0
14457 * @opflclass incdec
14458 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    /* INC Eb - shared unary body, passing the plain and LOCK-prefix worker variants. */
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
14464
14465
14466/**
14467 * @opmaps grp4
14468 * @opcode /1
14469 * @opflclass incdec
14470 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    /* DEC Eb - shared unary body, passing the plain and LOCK-prefix worker variants. */
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
14476
14477
14478/**
14479 * @opcode 0xfe
14480 */
14481FNIEMOP_DEF(iemOp_Grp4)
14482{
14483 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14484 switch (IEM_GET_MODRM_REG_8(bRm))
14485 {
14486 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
14487 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
14488 default:
14489 /** @todo is the eff-addr decoded? */
14490 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
14491 IEMOP_RAISE_INVALID_OPCODE_RET();
14492 }
14493}
14494
14495/**
14496 * @opmaps grp5
14497 * @opcode /0
14498 * @opflclass incdec
14499 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    /* INC Ev - unary body for the 16/32/64-bit workers, followed by the
       LOCK-prefixed continuation of that body. */
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
14506
14507
14508/**
14509 * @opmaps grp5
14510 * @opcode /1
14511 * @opflclass incdec
14512 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    /* DEC Ev - unary body for the 16/32/64-bit workers, followed by the
       LOCK-prefixed continuation of that body. */
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
14519
14520
14521/**
14522 * Opcode 0xff /2.
14523 * @param bRm The RM byte.
14524 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    /* CALL Ev - near indirect call; target taken from a register or memory
       operand, sized by the effective operand size. */
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14607
/**
 * Common body for far indirect call/jmp through a memory operand (grp5 /3
 * and /5): loads a sel:offset far pointer from memory and defers to the
 * given C implementation.
 *
 * @param   a_bRm           The ModR/M byte; register operands raise \#UD.
 * @param   a_fnCImpl       The C implementation performing the far branch.
 * @param   a_fCImplExtra   Additional IEM_CIMPL_F_XXX flags for the call.
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory: offset first, then the 16-bit selector. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint16_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint32_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t, u16Sel, 0); \
            IEM_MC_ARG(uint64_t, offSeg, 1); \
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14676
14677
14678/**
14679 * Opcode 0xff /3.
14680 * @param bRm The RM byte.
14681 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    /* CALLF Ep - far indirect call via the shared far-pointer body;
       pushes a return address, hence the BRANCH_STACK flag. */
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
14687
14688
14689/**
14690 * Opcode 0xff /4.
14691 * @param bRm The RM byte.
14692 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    /* JMP Ev - near indirect jump; target taken from a register or memory
       operand, sized by the effective operand size. */
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14775
14776
14777/**
14778 * Opcode 0xff /5.
14779 * @param bRm The RM byte.
14780 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    /* JMPF Ep - far indirect jump via the shared far-pointer body;
       no extra CIMPL flags (nothing is pushed onto the stack). */
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
14786
14787
14788/**
14789 * Opcode 0xff /6.
14790 * @param bRm The RM byte.
14791 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    /* PUSH Ev - push a register or memory operand onto the stack. */
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pushes don't exist in 64-bit mode, hence NOT_64BIT. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14843
14844
14845/**
14846 * @opcode 0xff
14847 */
14848FNIEMOP_DEF(iemOp_Grp5)
14849{
14850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14851 switch (IEM_GET_MODRM_REG_8(bRm))
14852 {
14853 case 0:
14854 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14855 case 1:
14856 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14857 case 2:
14858 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14859 case 3:
14860 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14861 case 4:
14862 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14863 case 5:
14864 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14865 case 6:
14866 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14867 case 7:
14868 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
14869 IEMOP_RAISE_INVALID_OPCODE_RET();
14870 }
14871 AssertFailedReturn(VERR_IEM_IPE_3);
14872}
14873
14874
14875
/**
 * The one-byte opcode decoder dispatch table, indexed by the opcode byte.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
14943
14944
14945/** @} */
14946
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette