VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103640

Last change on this file since 103640 was 103640, checked in by vboxsync, 9 months ago

VMM/IEM: Prepped emitting native code for all IEMOP_BODY_BINARY_rv_rm users, kicking out the experimental code in iemOp_xor_Gv_Ev. bugref:10376

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 557.6 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 103640 2024-03-01 22:06:51Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination (Eb,Gb encoding).
 *
 * Register destination: plain read-modify-write of the GPR, LOCK prefix
 * rejected (IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX).  Memory destination:
 * the byte is mapped RW (or ATOMIC when a LOCK prefix is in effect and
 * IEM_F_X86_DISREGARD_LOCK is not set), EFlags are fetched into a local and
 * committed only after the memory write has been committed.
 *
 * @param   a_fnNormalU8    Assembly worker for the register and non-locked
 *                          memory forms.
 * @param   a_fnLockedU8    Assembly worker for the LOCK-prefixed (atomic)
 *                          memory form.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
            IEM_MC_ARG(uint8_t,    u8Src,   1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bMapInfoDst); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bMapInfoDst, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bMapInfoDst); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
136
/**
 * Body for instructions like TEST &amp; CMP, ++ with a byte memory/registers as
 * operands (read-only destination, only EFLAGS are updated).
 *
 * Since the destination is never written, the memory operand is mapped
 * read-only and a LOCK prefix raises \#UD
 * (IEMOP_RAISE_INVALID_LOCK_PREFIX_RET).
 *
 * @param   a_fnNormalU8    Assembly worker; must not write through pu8Dst.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst, 0); \
            IEM_MC_ARG(uint8_t,         u8Src,  1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
198
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (Gb,Eb encoding).
 *
 * The destination is always a register, so no atomic memory update is needed
 * and a LOCK prefix is always rejected; the r/m operand (register or memory)
 * is only read.
 *
 * @param   a_fnNormalU8    Assembly worker for the operation.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
247
248
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination (Ev,Gv encoding).
 *
 * Switches on the effective operand size (16/32/64-bit).  The 32-bit register
 * form explicitly clears the high half of the 64-bit GPR
 * (IEM_MC_CLEAR_HIGH_GREG_U64).  Memory destinations are mapped RW and EFlags
 * are committed after the memory commit.
 *
 * Note! This macro deliberately ends inside an open `else {` for the
 *       LOCK-prefixed memory path; IEMOP_BODY_BINARY_rm_rv_LOCKED must follow
 *       immediately to supply that path and close the braces (the split works
 *       around a parsing issue in IEMAllInstPython.py).
 *
 * @param   a_fnNormalU16   Assembly worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Assembly worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed continuation of IEMOP_BODY_BINARY_rm_rv_RW: maps the memory
 * destination ATOMIC and calls the locked workers, then closes the braces the
 * RW macro left open.  Must be invoked immediately after the RW macro.
 *
 * @param   a_fnLockedU16   Atomic assembly worker for 16-bit operand size.
 * @param   a_fnLockedU32   Atomic assembly worker for 32-bit operand size.
 * @param   a_fnLockedU64   Atomic assembly worker for 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,   u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,   u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo /* CMP,TEST */); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,   u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
467
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination (Ev,Gv encoding, destination not
 * written, only EFLAGS updated).
 *
 * Memory operands are mapped read-only; a LOCK prefix raises \#UD.  Unlike
 * the RW variant, this macro is self-contained and closes all its braces.
 *
 * @param   a_fnNormalU16   Assembly worker for 16-bit operand size.
 * @param   a_fnNormalU32   Assembly worker for 32-bit operand size.
 * @param   a_fnNormalU64   Assembly worker for 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_ARG(uint16_t,         u16Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_ARG(uint32_t,         u32Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_ARG(uint64_t,         u64Src,  1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
617
618
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate (AL,Ib encoding).
 *
 * Fetches the immediate, references AL (low byte of xAX) and EFLAGS, and
 * calls the worker.  LOCK prefix is rejected (register destination).
 *
 * @param   a_fnNormalU8    Assembly worker for the operation.
 * @note    No trailing semicolon/(void)0 here - callers terminate the
 *          statement themselves.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
638
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate (rAX,Iz encoding).
 *
 * The 64-bit case fetches a sign-extended 32-bit immediate
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64).  When @a a_fModifiesDstReg is set, the
 * 32-bit case clears the high half of RAX as per x86 32-bit write semantics.
 *
 * Note: the switch cases have no `break`; each case exits via
 * IEM_MC_ADVANCE_RIP_AND_FINISH()/IEM_MC_END() - presumably these expand to
 * a return in this context (pattern used throughout; TODO confirm against
 * the IEM_MC definitions).
 *
 * @param   a_fnNormalU16       Assembly worker for 16-bit operand size.
 * @param   a_fnNormalU32       Assembly worker for 32-bit operand size.
 * @param   a_fnNormalU64       Assembly worker for 64-bit operand size.
 * @param   a_fModifiesDstReg   Non-zero if the worker writes the destination
 *                              (e.g. ADD/OR), zero for compare-only (CMP/TEST).
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
705
706
707
708/* Instruction specification format - work in progress: */
709
/**
 * @opcode      0x00
 * @opmnemonic  add
 * @op1         rm:Eb
 * @op2         reg:Gb
 * @opmaps      one
 * @openc       ModR/M
 * @opflclass   arithmetic
 * @ophints     harmless ignores_op_sizes
 * @opstats     add_Eb_Gb
 * @opgroup     og_gen_arith_bin
 * @optest              op1=1   op2=1   -> op1=2   efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf op1=1   op2=2   -> op1=3   efl&|=nc,po,na,nz,pl,nv
 * @optest              op1=254 op2=1   -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest              op1=128 op2=128 -> op1=0   efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb,Gb: byte add with reg/mem destination; LOCK allowed for the memory form. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked);
}
731
732
/**
 * @opcode      0x01
 * @opgroup     og_gen_arith_bin
 * @opflclass   arithmetic
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev,Gv: word/dword/qword add; the two body macros pair up (RW leaves
       open braces that LOCKED closes). */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
748
749
/**
 * @opcode       0x02
 * @opgroup      og_gen_arith_bin
 * @opflclass    arithmetic
 * @opcopytests  iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb: byte add with the register as destination; no LOCK form. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
}
761
762
/**
 * @opcode       0x03
 * @opgroup      og_gen_arith_bin
 * @opflclass    arithmetic
 * @opcopytests  iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv,Ev: register destination, so IEMOP_BODY_BINARY_rv_rm (defined
       elsewhere) is used; trailing args select worker set/behavior flags. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0, add, 0);
}
775
776
/**
 * @opcode       0x04
 * @opgroup      og_gen_arith_bin
 * @opflclass    arithmetic
 * @opcopytests  iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,Ib: byte add of an immediate into AL. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
}
788
789
/**
 * @opcode      0x05
 * @opgroup     og_gen_arith_bin
 * @opflclass   arithmetic
 * @optest               op1=1  op2=1  -> op1=2  efl&|=nv,pl,nz,na,pe
 * @optest      efl|=cf  op1=2  op2=2  -> op1=4  efl&|=nc,pe,na,nz,pl,nv
 * @optest      efl&~=cf op1=-1 op2=1  -> op1=0  efl&|=cf,po,af,zf,pl,nv
 * @optest               op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,Iz: destination register is modified, hence the final 1. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
}
804
805
/**
 * @opcode      0x06
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode (IEMOP_HLP_NO_64BIT). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
816
817
/**
 * @opcode      0x07
 * @opgroup     og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode.  Deferred to a C implementation since
       loading a segment register can change the execution mode; the guest
       register bitmap lists everything the CImpl may modify (xSP plus the
       full ES hidden register set). */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst    + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
                                iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
835
836
837/**
838 * @opcode 0x08
839 * @opgroup og_gen_arith_bin
840 * @opflclass logical
841 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
842 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
843 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
844 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
845 */
846FNIEMOP_DEF(iemOp_or_Eb_Gb)
847{
848 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
849 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
850 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked);
851}
852
853
/**
 * @opcode      0x09
 * @opgroup     og_gen_arith_bin
 * @opflclass   logical
 * @optest      efl|=of,cf op1=12 op2=7 -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0 op2=0 -> op1=0     efl&|=nc,po,na,zf,pl,nv
 * @optest      op1=-2 op2=1 -> op1=-1               efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @note        AF is documented as undefined, but both modern AMD and Intel CPUs clear it.
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev,Gv: the RW/LOCKED body macro pair; AF excluded from verification. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
873
874
/**
 * @opcode       0x0a
 * @opgroup      og_gen_arith_bin
 * @opflclass    logical
 * @opcopytests  iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR Gb,Eb: register destination; AF excluded from verification. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
}
887
888
/**
 * @opcode       0x0b
 * @opgroup      og_gen_arith_bin
 * @opflclass    logical
 * @opcopytests  iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv,Ev: register destination via IEMOP_BODY_BINARY_rv_rm (defined
       elsewhere); AF excluded from verification. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0, or, 0);
}
902
903
/**
 * @opcode       0x0c
 * @opgroup      og_gen_arith_bin
 * @opflclass    logical
 * @opcopytests  iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL,Ib: byte OR of an immediate into AL; AF excluded from verification. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
}
916
917
/**
 * @opcode      0x0d
 * @opgroup     og_gen_arith_bin
 * @opflclass   logical
 * @optest      efl|=of,cf op1=12 op2=7 -> op1=15   efl&|=nc,po,na,nz,pl,nv
 * @optest      efl|=of,cf op1=0 op2=0 -> op1=0     efl&|=nc,po,na,zf,pl,nv
 * @optest      op1=-2 op2=1 -> op1=-1               efl&|=nc,po,na,nz,ng,nv
 * @optest      o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest      o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz: destination register is modified, hence the final 1; AF
       excluded from verification. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
936
937
938/**
939 * @opcode 0x0e
940 * @opgroup og_stack_sreg
941 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    /* 0x0e is not a valid instruction in 64-bit mode (raises #UD). */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
948
949
950/**
951 * @opcode 0x0f
952 * @opmnemonic EscTwo0f
953 * @openc two0f
954 * @opdisenum OP_2B_ESC
955 * @ophints harmless
956 * @opgroup og_escapes
957 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* Four table entries per opcode, selected by the mandatory-prefix index (idxPrefix). */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
995
996/**
997 * @opcode 0x10
998 * @opgroup og_gen_arith_bin
999 * @opflclass arithmetic_carry
1000 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1001 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1002 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1003 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1004 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1005 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* r/m8 is the destination (MR form); the _locked impl handles a LOCK prefix. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked);
}
1011
1012
1013/**
1014 * @opcode 0x11
1015 * @opgroup og_gen_arith_bin
1016 * @opflclass arithmetic_carry
1017 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1019 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1020 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1021 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1022 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Normal and LOCK-prefixed bodies for the read-modify-write r/m destination. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1029
1030
1031/**
1032 * @opcode 0x12
1033 * @opgroup og_gen_arith_bin
1034 * @opflclass arithmetic_carry
1035 * @opcopytests iemOp_adc_Eb_Gb
1036 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* reg8 is the destination (RM form); no LOCK variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1042
1043
1044/**
1045 * @opcode 0x13
1046 * @opgroup og_gen_arith_bin
1047 * @opflclass arithmetic_carry
1048 * @opcopytests iemOp_adc_Ev_Gv
1049 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Same trailing-argument pattern as the other writable rv_rm users (or/sbb/and/sub/xor). */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0, adc, 0);
}
1056
1057
1058/**
1059 * @opcode 0x14
1060 * @opgroup og_gen_arith_bin
1061 * @opflclass arithmetic_carry
1062 * @opcopytests iemOp_adc_Eb_Gb
1063 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL destination with an 8-bit immediate source. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1069
1070
1071/**
1072 * @opcode 0x15
1073 * @opgroup og_gen_arith_bin
1074 * @opflclass arithmetic_carry
1075 * @opcopytests iemOp_adc_Ev_Gv
1076 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Iz is op-size dependent; the body dispatches on the effective operand size. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1082
1083
1084/**
1085 * @opcode 0x16
1086 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    /* 0x16 is not a valid instruction in 64-bit mode. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1093
1094
1095/**
1096 * @opcode 0x17
1097 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Defer to C impl: loading SS can change mode and inhibits interrupts for one
       instruction (IEM_CIMPL_F_INHIBIT_SHADOW).  The mask lists the guest registers
       the C impl may modify: rSP plus the whole hidden SS register. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1111
1112
1113/**
1114 * @opcode 0x18
1115 * @opgroup og_gen_arith_bin
1116 * @opflclass arithmetic_carry
1117 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* r/m8 is the destination (MR form); the _locked impl handles a LOCK prefix. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked);
}
1123
1124
1125/**
1126 * @opcode 0x19
1127 * @opgroup og_gen_arith_bin
1128 * @opflclass arithmetic_carry
1129 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Normal and LOCK-prefixed bodies for the read-modify-write r/m destination. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1136
1137
1138/**
1139 * @opcode 0x1a
1140 * @opgroup og_gen_arith_bin
1141 * @opflclass arithmetic_carry
1142 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* reg8 is the destination (RM form); no LOCK variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1148
1149
1150/**
1151 * @opcode 0x1b
1152 * @opgroup og_gen_arith_bin
1153 * @opflclass arithmetic_carry
1154 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Same trailing-argument pattern as the other writable rv_rm users. */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0, sbb, 0);
}
1161
1162
1163/**
1164 * @opcode 0x1c
1165 * @opgroup og_gen_arith_bin
1166 * @opflclass arithmetic_carry
1167 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL destination with an 8-bit immediate source. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1173
1174
1175/**
1176 * @opcode 0x1d
1177 * @opgroup og_gen_arith_bin
1178 * @opflclass arithmetic_carry
1179 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Iz is op-size dependent; the body dispatches on the effective operand size. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1185
1186
1187/**
1188 * @opcode 0x1e
1189 * @opgroup og_stack_sreg
1190 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    /* 0x1e is not a valid instruction in 64-bit mode. */
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1197
1198
1199/**
1200 * @opcode 0x1f
1201 * @opgroup og_stack_sreg
1202 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Defer to C impl; the mask lists the guest registers the C impl may modify:
       rSP plus the whole hidden DS register.  Unlike POP SS no interrupt shadow. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1216
1217
1218/**
1219 * @opcode 0x20
1220 * @opgroup og_gen_arith_bin
1221 * @opflclass logical
1222 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Logical ops leave AF undefined on real hardware. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked);
}
1229
1230
1231/**
1232 * @opcode 0x21
1233 * @opgroup og_gen_arith_bin
1234 * @opflclass logical
1235 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Normal and LOCK-prefixed bodies for the read-modify-write r/m destination. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1243
1244
1245/**
1246 * @opcode 0x22
1247 * @opgroup og_gen_arith_bin
1248 * @opflclass logical
1249 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* reg8 is the destination (RM form); no LOCK variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1256
1257
1258/**
1259 * @opcode 0x23
1260 * @opgroup og_gen_arith_bin
1261 * @opflclass logical
1262 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Same trailing-argument pattern as the other writable rv_rm users. */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0, and, 0);
}
1270
1271
1272/**
1273 * @opcode 0x24
1274 * @opgroup og_gen_arith_bin
1275 * @opflclass logical
1276 */
1277FNIEMOP_DEF(iemOp_and_Al_Ib)
1278{
1279 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1280 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1281 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1282}
1283
1284
1285/**
1286 * @opcode 0x25
1287 * @opgroup og_gen_arith_bin
1288 * @opflclass logical
1289 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Iz is op-size dependent; the body dispatches on the effective operand size. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1296
1297
1298/**
1299 * @opcode 0x26
1300 * @opmnemonic SEG
1301 * @op1 ES
1302 * @opgroup og_prefix
1303 * @openc prefix
1304 * @opdisenum OP_SEG
1305 * @ophints harmless
1306 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* Segment override prefix: record it, set the effective segment, and decode
       the next opcode byte recursively via the one-byte dispatch table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg    = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1316
1317
1318/**
1319 * @opcode 0x27
1320 * @opfltest af,cf
1321 * @opflmodify cf,pf,af,zf,sf,of
1322 * @opflundef of
1323 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after DAA (see @opflundef above). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1332
1333
1334/**
1335 * @opcode 0x28
1336 * @opgroup og_gen_arith_bin
1337 * @opflclass arithmetic
1338 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* r/m8 is the destination (MR form); the _locked impl handles a LOCK prefix. */
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked);
}
1344
1345
1346/**
1347 * @opcode 0x29
1348 * @opgroup og_gen_arith_bin
1349 * @opflclass arithmetic
1350 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Normal and LOCK-prefixed bodies for the read-modify-write r/m destination. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1357
1358
1359/**
1360 * @opcode 0x2a
1361 * @opgroup og_gen_arith_bin
1362 * @opflclass arithmetic
1363 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* reg8 is the destination (RM form); no LOCK variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1369
1370
1371/**
1372 * @opcode 0x2b
1373 * @opgroup og_gen_arith_bin
1374 * @opflclass arithmetic
1375 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Same trailing-argument pattern as the other writable rv_rm users. */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0, sub, 0);
}
1382
1383
1384/**
1385 * @opcode 0x2c
1386 * @opgroup og_gen_arith_bin
1387 * @opflclass arithmetic
1388 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    /* Fixed AL destination with an 8-bit immediate source. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1394
1395
1396/**
1397 * @opcode 0x2d
1398 * @opgroup og_gen_arith_bin
1399 * @opflclass arithmetic
1400 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    /* Iz is op-size dependent; the body dispatches on the effective operand size. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1406
1407
1408/**
1409 * @opcode 0x2e
1410 * @opmnemonic SEG
1411 * @op1 CS
1412 * @opgroup og_prefix
1413 * @openc prefix
1414 * @opdisenum OP_SEG
1415 * @ophints harmless
1416 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* Segment override prefix: record it, set the effective segment, and decode
       the next opcode byte recursively via the one-byte dispatch table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1426
1427
1428/**
1429 * @opcode 0x2f
1430 * @opfltest af,cf
1431 * @opflmodify cf,pf,af,zf,sf,of
1432 * @opflundef of
1433 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* OF is undefined after DAS (see @opflundef above). */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1442
1443
1444/**
1445 * @opcode 0x30
1446 * @opgroup og_gen_arith_bin
1447 * @opflclass logical
1448 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Logical ops leave AF undefined on real hardware. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked);
}
1455
1456
1457/**
1458 * @opcode 0x31
1459 * @opgroup og_gen_arith_bin
1460 * @opflclass logical
1461 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Normal and LOCK-prefixed bodies for the read-modify-write r/m destination. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1469
1470
1471/**
1472 * @opcode 0x32
1473 * @opgroup og_gen_arith_bin
1474 * @opflclass logical
1475 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* reg8 is the destination (RM form); no LOCK variant needed. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1482
1483
1484/**
1485 * @opcode 0x33
1486 * @opgroup og_gen_arith_bin
1487 * @opflclass logical
1488 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Deal with special case of 'xor rN, rN' which sets rN to zero and has a known EFLAGS outcome.
     */
    /* Condition: mod == 3 (register form) and reg == rm, with identical REX extension
       bits on both operands, i.e. both ModRM fields name the very same register. */
    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
    {
        /* For each operand size: store zero into the register, clear all status
           flags, and set PF+ZF (the fixed EFLAGS outcome of x ^ x = 0).
           NOTE(review): each IEM_MC_BEGIN/END block finishes the instruction, so
           control does not fall through to the generic body below - confirm
           against the IEM_MC_END definition. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /* Generic path; the last argument enables native emitters on AMD64/ARM64. */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0, xor, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
}
1549
1550
1551/**
1552 * @opcode 0x34
1553 * @opgroup og_gen_arith_bin
1554 * @opflclass logical
1555 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Fixed AL destination with an 8-bit immediate source. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1562
1563
1564/**
1565 * @opcode 0x35
1566 * @opgroup og_gen_arith_bin
1567 * @opflclass logical
1568 */
1569FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1570{
1571 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1572 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1573 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1574}
1575
1576
1577/**
1578 * @opcode 0x36
1579 * @opmnemonic SEG
1580 * @op1 SS
1581 * @opgroup og_prefix
1582 * @openc prefix
1583 * @opdisenum OP_SEG
1584 * @ophints harmless
1585 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* Segment override prefix: record it, set the effective segment, and decode
       the next opcode byte recursively via the one-byte dispatch table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1595
1596
1597/**
1598 * @opcode 0x37
1599 * @opfltest af
1600 * @opflmodify cf,pf,af,zf,sf,of
1601 * @opflundef pf,zf,sf,of
1602 * @opgroup og_gen_arith_dec
1603 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1604 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1605 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1606 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1607 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1608 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1609 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1610 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1611 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1612 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1613 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1614 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1615 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1617 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1618 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1619 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1621 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1622 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1623 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1624 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1625 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1626 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1627 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1628 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1629 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1630 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1631 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1632 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1633 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1634 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): the doxygen @opflundef above lists pf,zf,sf,of but only OF is
       excluded from verification here - confirm whether that is intentional. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1644
1645
1646/**
1647 * @opcode 0x38
1648 * @opflclass arithmetic
1649 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP only reads the r/m operand (RO body), so there is no LOCK variant. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
}
1655
1656
1657/**
1658 * @opcode 0x39
1659 * @opflclass arithmetic
1660 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    /* CMP only reads the r/m operand (RO body), so there is no LOCK variant. */
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1666
1667
1668/**
1669 * @opcode 0x3a
1670 * @opflclass arithmetic
1671 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    /* Only flags are produced; neither operand is written. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1677
1678
1679/**
1680 * @opcode 0x3b
1681 * @opflclass arithmetic
1682 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Unlike the writable ops (or/adc/...), cmp passes 0 after the u64 impl - the
       destination register is not written back. */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0, cmp, 0);
}
1689
1690
1691/**
1692 * @opcode 0x3c
1693 * @opflclass arithmetic
1694 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    /* Fixed AL source compared against an 8-bit immediate; only flags change. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1700
1701
1702/**
1703 * @opcode 0x3d
1704 * @opflclass arithmetic
1705 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    /* Final 0 (vs 1 for the writable ops) - rAX is not written back. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1711
1712
1713/**
1714 * @opcode 0x3e
1715 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* Segment override prefix: record it, set the effective segment, and decode
       the next opcode byte recursively via the one-byte dispatch table. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1725
1726
1727/**
1728 * @opcode 0x3f
1729 * @opfltest af
1730 * @opflmodify cf,pf,af,zf,sf,of
1731 * @opflundef pf,zf,sf,of
1732 * @opgroup og_gen_arith_dec
1733 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1734 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1735 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1736 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1737 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1738 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1741 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1742 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1744 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1745 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1751 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1752 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1753 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1754 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1756 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1758 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1759 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1760 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1761 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1762 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1763 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1764 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1765 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1766 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1767 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1768 */
1769FNIEMOP_DEF(iemOp_aas)
1770{
1771 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1772 IEMOP_HLP_NO_64BIT();
1773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1774 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1775
1776 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1777}
1778
1779
1780/**
1781 * Common 'inc/dec register' helper.
1782 *
1783 * Not for 64-bit code, only for what became the rex prefixes.
1784 */
1785#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1786 switch (pVCpu->iem.s.enmEffOpSize) \
1787 { \
1788 case IEMMODE_16BIT: \
1789 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1791 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1792 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1793 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1794 IEM_MC_REF_EFLAGS(pEFlags); \
1795 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1796 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1797 IEM_MC_END(); \
1798 break; \
1799 \
1800 case IEMMODE_32BIT: \
1801 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1802 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1803 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1804 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1805 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1806 IEM_MC_REF_EFLAGS(pEFlags); \
1807 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1808 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
1809 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1810 IEM_MC_END(); \
1811 break; \
1812 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1813 } \
1814 (void)0
1815
1816/**
1817 * @opcode 0x40
1818 * @opflclass incdec
1819 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Plain REX (no W/R/X/B bits): record it and decode the next byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1837
1838
1839/**
1840 * @opcode 0x41
1841 * @opflclass incdec
1842 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3; /* stored pre-shifted: bit 3 of the register number */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1861
1862
1863/**
1864 * @opcode 0x42
1865 * @opflclass incdec
1866 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* stored pre-shifted: bit 3 of the index register number */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1885
1886
1887
1888/**
1889 * @opcode 0x43
1890 * @opflclass incdec
1891 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3; /* pre-shifted REX.B and REX.X extension bits */
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1911
1912
/**
 * @opcode 0x44
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R and restart decoding with the following opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* bit 3 of the ModRM reg field register number */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC SP/ESP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1935
1936
/**
 * @opcode 0x45
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R + REX.B and restart decoding with the following opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC BP/EBP. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1960
1961
/**
 * @opcode 0x46
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R + REX.X and restart decoding with the following opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC SI/ESI. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1985
1986
/**
 * @opcode 0x47
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R + REX.B + REX.X and restart decoding with the following opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC DI/EDI. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2011
2012
/**
 * @opcode 0x48
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.W; it changes the effective operand size, hence the recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC AX/EAX. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2035
2036
/**
 * @opcode 0x49
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.B + REX.W; W changes the effective operand size, hence the recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC CX/ECX. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2060
2061
/**
 * @opcode 0x4a
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.X + REX.W; W changes the effective operand size, hence the recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC DX/EDX. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2085
2086
/**
 * @opcode 0x4b
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.B + REX.X + REX.W; W changes the effective operand size, hence the recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC BX/EBX. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2111
2112
/**
 * @opcode 0x4c
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R + REX.W; W changes the effective operand size, hence the recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC SP/ESP. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2136
2137
/**
 * @opcode 0x4d
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R + REX.B + REX.W; W changes the effective operand size, hence the recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC BP/EBP. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2162
2163
/**
 * @opcode 0x4e
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record REX.R + REX.X + REX.W; W changes the effective operand size, hence the recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC SI/ESI. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2188
2189
/**
 * @opcode 0x4f
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Record all four REX bits (R, B, X, W); W changes the effective operand size, hence the recalc. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: DEC DI/EDI. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2215
2216
/**
 * Common 'push register' helper.
 *
 * Fetches the given general purpose register and pushes it onto the stack,
 * emitting one of three microcode blocks depending on the effective operand
 * size.
 *
 * @param   iReg    The X86_GREG_XXX index of the register to push; in 64-bit
 *                  mode the REX.B bit recorded by the decoder is OR'ed in to
 *                  select R8-R15.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        /* In 64-bit mode PUSH defaults to a 64-bit operand; the 66h prefix
           selects 16-bit, and there is no 32-bit push (see the switch below,
           which only allows the 32-bit case outside 64-bit mode). */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2264
2265
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* The common worker applies REX.B, so this may actually push R8. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2274
2275
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* The common worker applies REX.B, so this may actually push R9. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2284
2285
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* The common worker applies REX.B, so this may actually push R10. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2294
2295
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* The common worker applies REX.B, so this may actually push R11. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2304
2305
/**
 * @opcode 0x54
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later:
       the 8086 stores the value of SP *after* the implicit decrement, hence
       the explicit subtraction of 2 from the fetched value below. */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2325
2326
/**
 * @opcode 0x55
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* The common worker applies REX.B, so this may actually push R13. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2335
2336
/**
 * @opcode 0x56
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* The common worker applies REX.B, so this may actually push R14. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2345
2346
/**
 * @opcode 0x57
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* The common worker applies REX.B, so this may actually push R15. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2355
2356
/**
 * Common 'pop register' helper.
 *
 * Pops a value off the stack into the given general purpose register,
 * emitting one of three microcode blocks depending on the effective operand
 * size.
 *
 * @param   iReg    The X86_GREG_XXX index of the destination register; in
 *                  64-bit mode the REX.B bit recorded by the decoder is
 *                  OR'ed in to select R8-R15.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB;
        /* In 64-bit mode POP defaults to a 64-bit operand; the 66h prefix
           selects 16-bit, and there is no 32-bit pop (the 32-bit case below
           is restricted to non-64-bit mode). */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2398
2399
/**
 * @opcode 0x58
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* The common worker applies REX.B, so this may actually pop into R8. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2408
2409
/**
 * @opcode 0x59
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* The common worker applies REX.B, so this may actually pop into R9. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2418
2419
/**
 * @opcode 0x5a
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* The common worker applies REX.B, so this may actually pop into R10. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2428
2429
/**
 * @opcode 0x5b
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* The common worker applies REX.B, so this may actually pop into R11. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2438
2439
/**
 * @opcode 0x5c
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    /* No 8086 special casing here (unlike 'push sp'); the common worker
       presumably handles the SP destination correctly - see IEM_MC_POP_GREG_*. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2448
2449
/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* The common worker applies REX.B, so this may actually pop into R13. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2458
2459
/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* The common worker applies REX.B, so this may actually pop into R14. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2468
2469
/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* The common worker applies REX.B, so this may actually pop into R15. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2478
2479
/**
 * @opcode 0x60
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT(); /* 0x60 is the REX2/other prefix space in 64-bit mode; PUSHA is invalid there. */
    /* Deferred to a C implementation; only SP/ESP is modified among the GPRs
       (the others are merely read), which is what the clobber mask states. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2493
2494
/**
 * @opcode 0x61
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode this is POPA; in 64-bit mode the byte is the MVEX
       prefix, which is not supported and raises \#UD below. */
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* Deferred to a C implementation; all eight GPRs may be written
           (SP/ESP via the stack pointer update), hence the full clobber mask. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2532
2533
2534/**
2535 * @opcode 0x62
2536 * @opmnemonic bound
2537 * @op1 Gv_RO
2538 * @op2 Ma
2539 * @opmincpu 80186
2540 * @ophints harmless x86_invalid_64
2541 * @optest op1=0 op2=0 ->
2542 * @optest op1=1 op2=0 -> value.xcpt=5
2543 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2544 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2545 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2546 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2547 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2548 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2549 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2550 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2551 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2552 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2553 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2554 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2555 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2556 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2557 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2558 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2559 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2560 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2561 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2562 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2563 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2564 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2565 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2566 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2567 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2568 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2569 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2570 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2571 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2572 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2573 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2574 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2575 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2576 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2577 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2578 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2579 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2580 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2581 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2582 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2583 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2584 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2585 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                /* The upper bound lives 2 bytes past the lower bound in the memory operand. */
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                /* The upper bound lives 4 bytes past the lower bound in the memory operand. */
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* Reaching this point means an EVEX prefix on an AVX-512 capable guest;
       consume the remaining two payload bytes but decoding is unimplemented. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2673
2674
/**
 * @opcode 0x63
 * @opflmodify zf
 * @note non-64-bit modes.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: Ew is the r/m register, Gw the reg-field register. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, fetch/commit EFLAGS around
           the assembly worker since it cannot be referenced directly here. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2727
2728
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source into the
             * 64-bit destination register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory: fetch the 32-bit operand
             * and sign-extend it into the 64-bit destination register.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* The no-REX.W (plain move) variant is not implemented yet; see the
           @todo above about its odd semantics. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2776
2777
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the FS segment override and continue decoding the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2795
2796
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the GS segment override and continue decoding the next byte. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2814
2815
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and recalculate the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2840
2841
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* 67h toggles between the two address sizes of the current mode;
       in 64-bit mode it selects 32-bit addressing (no 16-bit form). */
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2867
2868
/**
 * @opcode 0x68
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* The immediate is still 32 bits on the wire and gets sign
               extended to 64 bits before being pushed. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2912
2913
2914/**
2915 * @opcode 0x69
2916 * @opflclass multiply
2917 */
2918FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2919{
2920 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2921 IEMOP_HLP_MIN_186();
2922 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2923 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2924
2925 switch (pVCpu->iem.s.enmEffOpSize)
2926 {
2927 case IEMMODE_16BIT:
2928 {
2929 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2930 if (IEM_IS_MODRM_REG_MODE(bRm))
2931 {
2932 /* register operand */
2933 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2934 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2936 IEM_MC_LOCAL(uint16_t, u16Tmp);
2937 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2938 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2939 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
2940 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2941 IEM_MC_REF_EFLAGS(pEFlags);
2942 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2943 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2944
2945 IEM_MC_ADVANCE_RIP_AND_FINISH();
2946 IEM_MC_END();
2947 }
2948 else
2949 {
2950 /* memory operand */
2951 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2952 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2953 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2954
2955 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2957
2958 IEM_MC_LOCAL(uint16_t, u16Tmp);
2959 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2960
2961 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2962 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
2963 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2964 IEM_MC_REF_EFLAGS(pEFlags);
2965 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2966 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2967
2968 IEM_MC_ADVANCE_RIP_AND_FINISH();
2969 IEM_MC_END();
2970 }
2971 break;
2972 }
2973
2974 case IEMMODE_32BIT:
2975 {
2976 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2977 if (IEM_IS_MODRM_REG_MODE(bRm))
2978 {
2979 /* register operand */
2980 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2981 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
2982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2983 IEM_MC_LOCAL(uint32_t, u32Tmp);
2984 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2985
2986 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
2987 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
2988 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2989 IEM_MC_REF_EFLAGS(pEFlags);
2990 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2991 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2992
2993 IEM_MC_ADVANCE_RIP_AND_FINISH();
2994 IEM_MC_END();
2995 }
2996 else
2997 {
2998 /* memory operand */
2999 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3000 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3001 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3002
3003 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3005
3006 IEM_MC_LOCAL(uint32_t, u32Tmp);
3007 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3008
3009 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3010 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3011 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3012 IEM_MC_REF_EFLAGS(pEFlags);
3013 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3014 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3015
3016 IEM_MC_ADVANCE_RIP_AND_FINISH();
3017 IEM_MC_END();
3018 }
3019 break;
3020 }
3021
3022 case IEMMODE_64BIT:
3023 {
3024 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3025 if (IEM_IS_MODRM_REG_MODE(bRm))
3026 {
3027 /* register operand */
3028 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3029 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3031 IEM_MC_LOCAL(uint64_t, u64Tmp);
3032 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3033
3034 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3035 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
3036 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3037 IEM_MC_REF_EFLAGS(pEFlags);
3038 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3039 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3040
3041 IEM_MC_ADVANCE_RIP_AND_FINISH();
3042 IEM_MC_END();
3043 }
3044 else
3045 {
3046 /* memory operand */
3047 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3048 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3049 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3050
3051 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3052 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3053
3054 IEM_MC_LOCAL(uint64_t, u64Tmp);
3055 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3056
3057 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3058 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3059 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3060 IEM_MC_REF_EFLAGS(pEFlags);
3061 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3062 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3063
3064 IEM_MC_ADVANCE_RIP_AND_FINISH();
3065 IEM_MC_END();
3066 }
3067 break;
3068 }
3069
3070 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3071 }
3072}
3073
3074
3075/**
3076 * @opcode 0x6a
3077 */
3078FNIEMOP_DEF(iemOp_push_Ib)
3079{
3080 IEMOP_MNEMONIC(push_Ib, "push Ib");
3081 IEMOP_HLP_MIN_186();
3082 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3083 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3084
3085 switch (pVCpu->iem.s.enmEffOpSize)
3086 {
3087 case IEMMODE_16BIT:
3088 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
3089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3090 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3091 IEM_MC_PUSH_U16(uValue);
3092 IEM_MC_ADVANCE_RIP_AND_FINISH();
3093 IEM_MC_END();
3094 break;
3095 case IEMMODE_32BIT:
3096 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3098 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3099 IEM_MC_PUSH_U32(uValue);
3100 IEM_MC_ADVANCE_RIP_AND_FINISH();
3101 IEM_MC_END();
3102 break;
3103 case IEMMODE_64BIT:
3104 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3106 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3107 IEM_MC_PUSH_U64(uValue);
3108 IEM_MC_ADVANCE_RIP_AND_FINISH();
3109 IEM_MC_END();
3110 break;
3111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3112 }
3113}
3114
3115
3116/**
3117 * @opcode 0x6b
3118 * @opflclass multiply
3119 */
3120FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3121{
3122 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3123 IEMOP_HLP_MIN_186();
3124 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3125 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3126
3127 switch (pVCpu->iem.s.enmEffOpSize)
3128 {
3129 case IEMMODE_16BIT:
3130 {
3131 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3132 if (IEM_IS_MODRM_REG_MODE(bRm))
3133 {
3134 /* register operand */
3135 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3136 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3138
3139 IEM_MC_LOCAL(uint16_t, u16Tmp);
3140 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3141
3142 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3143 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3144 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3145 IEM_MC_REF_EFLAGS(pEFlags);
3146 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3147 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3148
3149 IEM_MC_ADVANCE_RIP_AND_FINISH();
3150 IEM_MC_END();
3151 }
3152 else
3153 {
3154 /* memory operand */
3155 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3156
3157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3158 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3159
3160 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3161 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3162
3163 IEM_MC_LOCAL(uint16_t, u16Tmp);
3164 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3165
3166 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3167 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3168 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3169 IEM_MC_REF_EFLAGS(pEFlags);
3170 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3171 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3172
3173 IEM_MC_ADVANCE_RIP_AND_FINISH();
3174 IEM_MC_END();
3175 }
3176 break;
3177 }
3178
3179 case IEMMODE_32BIT:
3180 {
3181 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3182 if (IEM_IS_MODRM_REG_MODE(bRm))
3183 {
3184 /* register operand */
3185 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3186 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3187 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3188 IEM_MC_LOCAL(uint32_t, u32Tmp);
3189 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3190
3191 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3192 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3193 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3194 IEM_MC_REF_EFLAGS(pEFlags);
3195 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3196 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3197
3198 IEM_MC_ADVANCE_RIP_AND_FINISH();
3199 IEM_MC_END();
3200 }
3201 else
3202 {
3203 /* memory operand */
3204 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3207
3208 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3210
3211 IEM_MC_LOCAL(uint32_t, u32Tmp);
3212 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3213
3214 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3215 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3216 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3217 IEM_MC_REF_EFLAGS(pEFlags);
3218 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3219 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3220
3221 IEM_MC_ADVANCE_RIP_AND_FINISH();
3222 IEM_MC_END();
3223 }
3224 break;
3225 }
3226
3227 case IEMMODE_64BIT:
3228 {
3229 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3230 if (IEM_IS_MODRM_REG_MODE(bRm))
3231 {
3232 /* register operand */
3233 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3234 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3235 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3236 IEM_MC_LOCAL(uint64_t, u64Tmp);
3237 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3238
3239 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3240 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3241 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3242 IEM_MC_REF_EFLAGS(pEFlags);
3243 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3244 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3245
3246 IEM_MC_ADVANCE_RIP_AND_FINISH();
3247 IEM_MC_END();
3248 }
3249 else
3250 {
3251 /* memory operand */
3252 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3253 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3254 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3255
3256 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3257 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3258
3259 IEM_MC_LOCAL(uint64_t, u64Tmp);
3260 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3261
3262 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3263 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3264 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3265 IEM_MC_REF_EFLAGS(pEFlags);
3266 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3267 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3268
3269 IEM_MC_ADVANCE_RIP_AND_FINISH();
3270 IEM_MC_END();
3271 }
3272 break;
3273 }
3274
3275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3276 }
3277}
3278
3279
3280/**
3281 * @opcode 0x6c
3282 * @opfltest iopl,df
3283 */
3284FNIEMOP_DEF(iemOp_insb_Yb_DX)
3285{
3286 IEMOP_HLP_MIN_186();
3287 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3288 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3289 {
3290 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3291 switch (pVCpu->iem.s.enmEffAddrMode)
3292 {
3293 case IEMMODE_16BIT:
3294 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3295 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3296 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3297 iemCImpl_rep_ins_op8_addr16, false);
3298 case IEMMODE_32BIT:
3299 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3300 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3301 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3302 iemCImpl_rep_ins_op8_addr32, false);
3303 case IEMMODE_64BIT:
3304 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3305 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3306 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3307 iemCImpl_rep_ins_op8_addr64, false);
3308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3309 }
3310 }
3311 else
3312 {
3313 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3314 switch (pVCpu->iem.s.enmEffAddrMode)
3315 {
3316 case IEMMODE_16BIT:
3317 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3318 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3319 iemCImpl_ins_op8_addr16, false);
3320 case IEMMODE_32BIT:
3321 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3322 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3323 iemCImpl_ins_op8_addr32, false);
3324 case IEMMODE_64BIT:
3325 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3326 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3327 iemCImpl_ins_op8_addr64, false);
3328 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3329 }
3330 }
3331}
3332
3333
3334/**
3335 * @opcode 0x6d
3336 * @opfltest iopl,df
3337 */
3338FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3339{
3340 IEMOP_HLP_MIN_186();
3341 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3342 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3343 {
3344 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3345 switch (pVCpu->iem.s.enmEffOpSize)
3346 {
3347 case IEMMODE_16BIT:
3348 switch (pVCpu->iem.s.enmEffAddrMode)
3349 {
3350 case IEMMODE_16BIT:
3351 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3352 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3353 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3354 iemCImpl_rep_ins_op16_addr16, false);
3355 case IEMMODE_32BIT:
3356 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3357 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3358 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3359 iemCImpl_rep_ins_op16_addr32, false);
3360 case IEMMODE_64BIT:
3361 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3362 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3363 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3364 iemCImpl_rep_ins_op16_addr64, false);
3365 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3366 }
3367 break;
3368 case IEMMODE_64BIT:
3369 case IEMMODE_32BIT:
3370 switch (pVCpu->iem.s.enmEffAddrMode)
3371 {
3372 case IEMMODE_16BIT:
3373 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3374 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3375 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3376 iemCImpl_rep_ins_op32_addr16, false);
3377 case IEMMODE_32BIT:
3378 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3379 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3380 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3381 iemCImpl_rep_ins_op32_addr32, false);
3382 case IEMMODE_64BIT:
3383 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3384 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3385 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3386 iemCImpl_rep_ins_op32_addr64, false);
3387 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3388 }
3389 break;
3390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3391 }
3392 }
3393 else
3394 {
3395 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3396 switch (pVCpu->iem.s.enmEffOpSize)
3397 {
3398 case IEMMODE_16BIT:
3399 switch (pVCpu->iem.s.enmEffAddrMode)
3400 {
3401 case IEMMODE_16BIT:
3402 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3403 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3404 iemCImpl_ins_op16_addr16, false);
3405 case IEMMODE_32BIT:
3406 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3407 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3408 iemCImpl_ins_op16_addr32, false);
3409 case IEMMODE_64BIT:
3410 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3411 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3412 iemCImpl_ins_op16_addr64, false);
3413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3414 }
3415 break;
3416 case IEMMODE_64BIT:
3417 case IEMMODE_32BIT:
3418 switch (pVCpu->iem.s.enmEffAddrMode)
3419 {
3420 case IEMMODE_16BIT:
3421 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3422 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3423 iemCImpl_ins_op32_addr16, false);
3424 case IEMMODE_32BIT:
3425 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3426 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3427 iemCImpl_ins_op32_addr32, false);
3428 case IEMMODE_64BIT:
3429 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3430 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3431 iemCImpl_ins_op32_addr64, false);
3432 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3433 }
3434 break;
3435 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3436 }
3437 }
3438}
3439
3440
3441/**
3442 * @opcode 0x6e
3443 * @opfltest iopl,df
3444 */
3445FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3446{
3447 IEMOP_HLP_MIN_186();
3448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3449 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3450 {
3451 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3452 switch (pVCpu->iem.s.enmEffAddrMode)
3453 {
3454 case IEMMODE_16BIT:
3455 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3456 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3457 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3458 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3459 case IEMMODE_32BIT:
3460 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3461 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3462 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3463 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3464 case IEMMODE_64BIT:
3465 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3466 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3467 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3468 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3469 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3470 }
3471 }
3472 else
3473 {
3474 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3475 switch (pVCpu->iem.s.enmEffAddrMode)
3476 {
3477 case IEMMODE_16BIT:
3478 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3479 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3480 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3481 case IEMMODE_32BIT:
3482 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3483 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3484 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3485 case IEMMODE_64BIT:
3486 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3487 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3488 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3490 }
3491 }
3492}
3493
3494
3495/**
3496 * @opcode 0x6f
3497 * @opfltest iopl,df
3498 */
3499FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3500{
3501 IEMOP_HLP_MIN_186();
3502 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3503 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3504 {
3505 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3506 switch (pVCpu->iem.s.enmEffOpSize)
3507 {
3508 case IEMMODE_16BIT:
3509 switch (pVCpu->iem.s.enmEffAddrMode)
3510 {
3511 case IEMMODE_16BIT:
3512 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3513 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3514 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3515 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3516 case IEMMODE_32BIT:
3517 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3518 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3519 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3520 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3521 case IEMMODE_64BIT:
3522 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3523 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3524 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3525 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3526 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3527 }
3528 break;
3529 case IEMMODE_64BIT:
3530 case IEMMODE_32BIT:
3531 switch (pVCpu->iem.s.enmEffAddrMode)
3532 {
3533 case IEMMODE_16BIT:
3534 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3535 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3536 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3537 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3538 case IEMMODE_32BIT:
3539 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3540 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3541 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3542 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3543 case IEMMODE_64BIT:
3544 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3545 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3546 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3547 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3549 }
3550 break;
3551 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3552 }
3553 }
3554 else
3555 {
3556 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3557 switch (pVCpu->iem.s.enmEffOpSize)
3558 {
3559 case IEMMODE_16BIT:
3560 switch (pVCpu->iem.s.enmEffAddrMode)
3561 {
3562 case IEMMODE_16BIT:
3563 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3564 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3565 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3566 case IEMMODE_32BIT:
3567 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3568 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3569 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3570 case IEMMODE_64BIT:
3571 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3572 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3573 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3574 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3575 }
3576 break;
3577 case IEMMODE_64BIT:
3578 case IEMMODE_32BIT:
3579 switch (pVCpu->iem.s.enmEffAddrMode)
3580 {
3581 case IEMMODE_16BIT:
3582 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3583 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3584 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3585 case IEMMODE_32BIT:
3586 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3587 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3588 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3589 case IEMMODE_64BIT:
3590 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3591 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3592 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3594 }
3595 break;
3596 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3597 }
3598 }
3599}
3600
3601
3602/**
3603 * @opcode 0x70
3604 * @opfltest of
3605 */
3606FNIEMOP_DEF(iemOp_jo_Jb)
3607{
3608 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3609 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3610 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3611
3612 IEM_MC_BEGIN(0, 0, 0, 0);
3613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3614 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3615 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3616 } IEM_MC_ELSE() {
3617 IEM_MC_ADVANCE_RIP_AND_FINISH();
3618 } IEM_MC_ENDIF();
3619 IEM_MC_END();
3620}
3621
3622
3623/**
3624 * @opcode 0x71
3625 * @opfltest of
3626 */
3627FNIEMOP_DEF(iemOp_jno_Jb)
3628{
3629 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3630 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3631 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3632
3633 IEM_MC_BEGIN(0, 0, 0, 0);
3634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3635 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3636 IEM_MC_ADVANCE_RIP_AND_FINISH();
3637 } IEM_MC_ELSE() {
3638 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3639 } IEM_MC_ENDIF();
3640 IEM_MC_END();
3641}
3642
3643/**
3644 * @opcode 0x72
3645 * @opfltest cf
3646 */
3647FNIEMOP_DEF(iemOp_jc_Jb)
3648{
3649 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3650 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3651 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3652
3653 IEM_MC_BEGIN(0, 0, 0, 0);
3654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3655 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3656 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3657 } IEM_MC_ELSE() {
3658 IEM_MC_ADVANCE_RIP_AND_FINISH();
3659 } IEM_MC_ENDIF();
3660 IEM_MC_END();
3661}
3662
3663
3664/**
3665 * @opcode 0x73
3666 * @opfltest cf
3667 */
3668FNIEMOP_DEF(iemOp_jnc_Jb)
3669{
3670 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3671 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3672 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3673
3674 IEM_MC_BEGIN(0, 0, 0, 0);
3675 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3676 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3677 IEM_MC_ADVANCE_RIP_AND_FINISH();
3678 } IEM_MC_ELSE() {
3679 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3680 } IEM_MC_ENDIF();
3681 IEM_MC_END();
3682}
3683
3684
3685/**
3686 * @opcode 0x74
3687 * @opfltest zf
3688 */
3689FNIEMOP_DEF(iemOp_je_Jb)
3690{
3691 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3692 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3693 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3694
3695 IEM_MC_BEGIN(0, 0, 0, 0);
3696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3697 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3698 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3699 } IEM_MC_ELSE() {
3700 IEM_MC_ADVANCE_RIP_AND_FINISH();
3701 } IEM_MC_ENDIF();
3702 IEM_MC_END();
3703}
3704
3705
3706/**
3707 * @opcode 0x75
3708 * @opfltest zf
3709 */
3710FNIEMOP_DEF(iemOp_jne_Jb)
3711{
3712 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3713 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3714 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3715
3716 IEM_MC_BEGIN(0, 0, 0, 0);
3717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3718 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3719 IEM_MC_ADVANCE_RIP_AND_FINISH();
3720 } IEM_MC_ELSE() {
3721 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3722 } IEM_MC_ENDIF();
3723 IEM_MC_END();
3724}
3725
3726
3727/**
3728 * @opcode 0x76
3729 * @opfltest cf,zf
3730 */
3731FNIEMOP_DEF(iemOp_jbe_Jb)
3732{
3733 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3734 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3735 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3736
3737 IEM_MC_BEGIN(0, 0, 0, 0);
3738 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3739 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3740 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3741 } IEM_MC_ELSE() {
3742 IEM_MC_ADVANCE_RIP_AND_FINISH();
3743 } IEM_MC_ENDIF();
3744 IEM_MC_END();
3745}
3746
3747
3748/**
3749 * @opcode 0x77
3750 * @opfltest cf,zf
3751 */
3752FNIEMOP_DEF(iemOp_jnbe_Jb)
3753{
3754 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3755 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3756 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3757
3758 IEM_MC_BEGIN(0, 0, 0, 0);
3759 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3760 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3761 IEM_MC_ADVANCE_RIP_AND_FINISH();
3762 } IEM_MC_ELSE() {
3763 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3764 } IEM_MC_ENDIF();
3765 IEM_MC_END();
3766}
3767
3768
3769/**
3770 * @opcode 0x78
3771 * @opfltest sf
3772 */
3773FNIEMOP_DEF(iemOp_js_Jb)
3774{
3775 IEMOP_MNEMONIC(js_Jb, "js Jb");
3776 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3777 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3778
3779 IEM_MC_BEGIN(0, 0, 0, 0);
3780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3781 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3782 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3783 } IEM_MC_ELSE() {
3784 IEM_MC_ADVANCE_RIP_AND_FINISH();
3785 } IEM_MC_ENDIF();
3786 IEM_MC_END();
3787}
3788
3789
3790/**
3791 * @opcode 0x79
3792 * @opfltest sf
3793 */
3794FNIEMOP_DEF(iemOp_jns_Jb)
3795{
3796 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3797 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3798 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3799
3800 IEM_MC_BEGIN(0, 0, 0, 0);
3801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3802 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3803 IEM_MC_ADVANCE_RIP_AND_FINISH();
3804 } IEM_MC_ELSE() {
3805 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3806 } IEM_MC_ENDIF();
3807 IEM_MC_END();
3808}
3809
3810
3811/**
3812 * @opcode 0x7a
3813 * @opfltest pf
3814 */
3815FNIEMOP_DEF(iemOp_jp_Jb)
3816{
3817 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3818 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3819 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3820
3821 IEM_MC_BEGIN(0, 0, 0, 0);
3822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3823 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3824 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3825 } IEM_MC_ELSE() {
3826 IEM_MC_ADVANCE_RIP_AND_FINISH();
3827 } IEM_MC_ENDIF();
3828 IEM_MC_END();
3829}
3830
3831
3832/**
3833 * @opcode 0x7b
3834 * @opfltest pf
3835 */
3836FNIEMOP_DEF(iemOp_jnp_Jb)
3837{
3838 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3839 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3840 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3841
3842 IEM_MC_BEGIN(0, 0, 0, 0);
3843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3844 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3845 IEM_MC_ADVANCE_RIP_AND_FINISH();
3846 } IEM_MC_ELSE() {
3847 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3848 } IEM_MC_ENDIF();
3849 IEM_MC_END();
3850}
3851
3852
3853/**
3854 * @opcode 0x7c
3855 * @opfltest sf,of
3856 */
3857FNIEMOP_DEF(iemOp_jl_Jb)
3858{
3859 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3860 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3861 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3862
3863 IEM_MC_BEGIN(0, 0, 0, 0);
3864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3865 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3866 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3867 } IEM_MC_ELSE() {
3868 IEM_MC_ADVANCE_RIP_AND_FINISH();
3869 } IEM_MC_ENDIF();
3870 IEM_MC_END();
3871}
3872
3873
3874/**
3875 * @opcode 0x7d
3876 * @opfltest sf,of
3877 */
3878FNIEMOP_DEF(iemOp_jnl_Jb)
3879{
3880 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3881 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3882 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3883
3884 IEM_MC_BEGIN(0, 0, 0, 0);
3885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3886 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3887 IEM_MC_ADVANCE_RIP_AND_FINISH();
3888 } IEM_MC_ELSE() {
3889 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3890 } IEM_MC_ENDIF();
3891 IEM_MC_END();
3892}
3893
3894
3895/**
3896 * @opcode 0x7e
3897 * @opfltest zf,sf,of
3898 */
3899FNIEMOP_DEF(iemOp_jle_Jb)
3900{
3901 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3902 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3903 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3904
3905 IEM_MC_BEGIN(0, 0, 0, 0);
3906 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3907 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3908 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3909 } IEM_MC_ELSE() {
3910 IEM_MC_ADVANCE_RIP_AND_FINISH();
3911 } IEM_MC_ENDIF();
3912 IEM_MC_END();
3913}
3914
3915
3916/**
3917 * @opcode 0x7f
3918 * @opfltest zf,sf,of
3919 */
3920FNIEMOP_DEF(iemOp_jnle_Jb)
3921{
3922 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3923 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3924 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3925
3926 IEM_MC_BEGIN(0, 0, 0, 0);
3927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3928 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3929 IEM_MC_ADVANCE_RIP_AND_FINISH();
3930 } IEM_MC_ELSE() {
3931 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3932 } IEM_MC_ENDIF();
3933 IEM_MC_END();
3934}
3935
3936
3937/**
3938 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3939 * iemOp_Grp1_Eb_Ib_80.
3940 */
3941#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3942 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3943 { \
3944 /* register target */ \
3945 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3946 IEM_MC_BEGIN(3, 0, 0, 0); \
3947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3948 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3949 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3950 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3951 \
3952 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3953 IEM_MC_REF_EFLAGS(pEFlags); \
3954 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3955 \
3956 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3957 IEM_MC_END(); \
3958 } \
3959 else \
3960 { \
3961 /* memory target */ \
3962 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
3963 { \
3964 IEM_MC_BEGIN(3, 3, 0, 0); \
3965 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3966 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3968 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3969 \
3970 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3971 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3972 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3973 IEMOP_HLP_DONE_DECODING(); \
3974 \
3975 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3976 IEM_MC_FETCH_EFLAGS(EFlags); \
3977 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3978 \
3979 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
3980 IEM_MC_COMMIT_EFLAGS(EFlags); \
3981 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3982 IEM_MC_END(); \
3983 } \
3984 else \
3985 { \
3986 (void)0
3987
3988#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
3989 IEM_MC_BEGIN(3, 3, 0, 0); \
3990 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3991 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3992 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3993 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3994 \
3995 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3996 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3997 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3998 IEMOP_HLP_DONE_DECODING(); \
3999 \
4000 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4001 IEM_MC_FETCH_EFLAGS(EFlags); \
4002 IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
4003 \
4004 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
4005 IEM_MC_COMMIT_EFLAGS(EFlags); \
4006 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4007 IEM_MC_END(); \
4008 } \
4009 } \
4010 (void)0
4011
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for workers that only
 * compute flags (cmp): the memory operand is mapped RO and never written
 * back.  Must be paired with IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK, which closes
 * the open scope and rejects the LOCK prefix.
 *
 * @param a_fnNormalU8  Assembly worker taking (pu8Dst, u8Src, pEFlags);
 *                      only pEFlags is updated.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0

/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RO: a LOCK prefix on a read-only op is
 * invalid, so raise \#UD after completing the decode.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4065
4066
4067
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * @opflclass arithmetic
 *
 * ADD Eb,Ib - byte add with immediate; LOCK selects the atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
4079
4080
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * @opflclass logical
 *
 * OR Eb,Ib - byte bitwise OR with immediate; LOCK selects the atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
4092
4093
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 *
 * ADC Eb,Ib - byte add-with-carry with immediate; consumes CF as input.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4105
4106
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 *
 * SBB Eb,Ib - byte subtract-with-borrow with immediate; consumes CF as input.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4118
4119
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * @opflclass logical
 *
 * AND Eb,Ib - byte bitwise AND with immediate; LOCK selects the atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4131
4132
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * @opflclass arithmetic
 *
 * SUB Eb,Ib - byte subtract with immediate; LOCK selects the atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4144
4145
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * @opflclass logical
 *
 * XOR Eb,Ib - byte bitwise XOR with immediate; LOCK selects the atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4157
4158
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * @opflclass arithmetic
 *
 * CMP Eb,Ib - byte compare with immediate: flags only, destination is never
 * written, hence the read-only body and the \#UD on a LOCK prefix.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4170
4171
/**
 * @opcode 0x80
 *
 * Group 1 decoder for byte ops with byte immediate; the operation is selected
 * by the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Dispatch on /r (bits 5:3 of the ModR/M byte). */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4191
4192
/**
 * Body for a group 1 binary operator.
 *
 * Ev,Iz form (opcode 0x81): 16/32-bit immediates are fetched at their natural
 * width; the 64-bit form fetches a 32-bit immediate and sign-extends it, per
 * the x86 Iz convention.  Handles register and non-locked memory targets and
 * leaves an open 'else' scope that IEMOP_BODY_BINARY_Ev_Iz_LOCKED closes.
 *
 * @param a_fnNormalU16  Worker for 16-bit operand size.
 * @param a_fnNormalU32  Worker for 32-bit operand size.
 * @param a_fnNormalU64  Worker for 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * LOCK-prefixed memory tail for IEMOP_BODY_BINARY_Ev_Iz_RW; maps the operand
 * atomically, calls the locked worker and closes the open braces.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4433
/* read-only version */
/**
 * Read-only Ev,Iz body for flags-only workers (cmp): the memory operand is
 * mapped RO and never written back.  Unlike the RW variant this macro is
 * self-contained - it raises \#UD itself when a LOCK prefix is present.
 *
 * @param a_fnNormalU16  Worker for 16-bit operand size (updates EFLAGS only).
 * @param a_fnNormalU32  Worker for 32-bit operand size (updates EFLAGS only).
 * @param a_fnNormalU64  Worker for 64-bit operand size (updates EFLAGS only).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefix on a read-only operation is invalid. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4590
4591
/**
 * @opmaps grp1_81
 * @opcode /0
 * @opflclass arithmetic
 *
 * ADD Ev,Iz - word/dword/qword add with immediate; LOCK selects the atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4603
4604
/**
 * @opmaps grp1_81
 * @opcode /1
 * @opflclass logical
 *
 * OR Ev,Iz - word/dword/qword bitwise OR with immediate; LOCK selects the atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4616
4617
/**
 * @opmaps grp1_81
 * @opcode /2
 * @opflclass arithmetic_carry
 *
 * ADC Ev,Iz - add-with-carry with immediate; consumes CF as input.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4629
4630
/**
 * @opmaps grp1_81
 * @opcode /3
 * @opflclass arithmetic_carry
 *
 * SBB Ev,Iz - subtract-with-borrow with immediate; consumes CF as input.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4642
4643
/**
 * @opmaps grp1_81
 * @opcode /4
 * @opflclass logical
 *
 * AND Ev,Iz - word/dword/qword bitwise AND with immediate; LOCK selects the atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4655
4656
/**
 * @opmaps grp1_81
 * @opcode /5
 * @opflclass arithmetic
 *
 * SUB Ev,Iz - word/dword/qword subtract with immediate; LOCK selects the atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4668
4669
/**
 * @opmaps grp1_81
 * @opcode /6
 * @opflclass logical
 *
 * XOR Ev,Iz - word/dword/qword bitwise XOR with immediate; LOCK selects the atomic memory path.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4681
4682
/**
 * @opmaps grp1_81
 * @opcode /7
 * @opflclass arithmetic
 *
 * CMP Ev,Iz - compare with immediate: flags only; the RO body itself raises
 * \#UD on a LOCK prefix, so no separate lock macro is needed here.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4693
4694
/**
 * @opcode 0x81
 *
 * Group 1 decoder for word/dword/qword ops with Iz immediate; the operation
 * is selected by the ModR/M reg field.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Dispatch on /r (bits 5:3 of the ModR/M byte). */
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4714
4715
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 *
 * Alias of opcode 0x80 that is only valid outside 64-bit mode; in 64-bit
 * mode it raises \#UD before any further decoding.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4726
4727
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.
 *
 * Ev,Ib form (opcode 0x83): the imm8 is sign-extended to the effective
 * operand size before the operation.  Handles register and non-locked memory
 * targets and leaves an open 'else' scope that IEMOP_BODY_BINARY_Ev_Ib_LOCKED
 * closes (macro split required by the IEMAllInstPython.py parser).
 *
 * @param a_fnNormalU16  Worker for 16-bit operand size.
 * @param a_fnNormalU32  Worker for 32-bit operand size.
 * @param a_fnNormalU64  Worker for 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                                    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                                    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                                    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                                    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                                    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                                    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed memory tail for IEMOP_BODY_BINARY_Ev_Ib_RW; maps the operand
 * atomically, calls the locked worker and closes the open braces.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4952
/* Read-only variant: evaluates the operation and commits EFLAGS only; the
   destination operand is never written (mapped RO in the memory case).
   Used by CMP Ev,Ib.  Since nothing is written, a LOCK prefix on the
   memory form raises \#UD (see the IEMOP_RAISE_INVALID_LOCK_PREFIX_RET
   branch at the bottom).  The Ib operand is sign-extended to the
   effective operand size. */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5099
5100/**
5101 * @opmaps grp1_83
5102 * @opcode /0
5103 * @opflclass arithmetic
5104 */
5105FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
5106{
5107 IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
5108 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
5109 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
5110}
5111
5112
5113/**
5114 * @opmaps grp1_83
5115 * @opcode /1
5116 * @opflclass logical
5117 */
5118FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
5119{
5120 IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
5121 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
5122 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
5123}
5124
5125
5126/**
5127 * @opmaps grp1_83
5128 * @opcode /2
5129 * @opflclass arithmetic_carry
5130 */
5131FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
5132{
5133 IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
5134 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
5135 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
5136}
5137
5138
5139/**
5140 * @opmaps grp1_83
5141 * @opcode /3
5142 * @opflclass arithmetic_carry
5143 */
5144FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
5145{
5146 IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
5147 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
5148 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
5149}
5150
5151
5152/**
5153 * @opmaps grp1_83
5154 * @opcode /4
5155 * @opflclass logical
5156 */
5157FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
5158{
5159 IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
5160 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
5161 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
5162}
5163
5164
5165/**
5166 * @opmaps grp1_83
5167 * @opcode /5
5168 * @opflclass arithmetic
5169 */
5170FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
5171{
5172 IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
5173 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
5174 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
5175}
5176
5177
5178/**
5179 * @opmaps grp1_83
5180 * @opcode /6
5181 * @opflclass logical
5182 */
5183FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
5184{
5185 IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
5186 IEMOP_BODY_BINARY_Ev_Ib_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
5187 IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
5188}
5189
5190
5191/**
5192 * @opmaps grp1_83
5193 * @opcode /7
5194 * @opflclass arithmetic
5195 */
5196FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
5197{
5198 IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
5199 IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
5200}
5201
5202
5203/**
5204 * @opcode 0x83
5205 */
5206FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
5207{
5208 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
5209 to the 386 even if absent in the intel reference manuals and some
5210 3rd party opcode listings. */
5211 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5212 switch (IEM_GET_MODRM_REG_8(bRm))
5213 {
5214 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
5215 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
5216 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
5217 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
5218 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
5219 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
5220 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
5221 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
5222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5223 }
5224}
5225
5226
5227/**
5228 * @opcode 0x84
5229 * @opflclass logical
5230 */
5231FNIEMOP_DEF(iemOp_test_Eb_Gb)
5232{
5233 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
5234 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5235 IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
5236}
5237
5238
5239/**
5240 * @opcode 0x85
5241 * @opflclass logical
5242 */
5243FNIEMOP_DEF(iemOp_test_Ev_Gv)
5244{
5245 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
5246 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5247 IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
5248}
5249
5250
5251/**
5252 * @opcode 0x86
5253 */
5254FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5255{
5256 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5257 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5258
5259 /*
5260 * If rm is denoting a register, no more instruction bytes.
5261 */
5262 if (IEM_IS_MODRM_REG_MODE(bRm))
5263 {
5264 IEM_MC_BEGIN(0, 2, 0, 0);
5265 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5266 IEM_MC_LOCAL(uint8_t, uTmp1);
5267 IEM_MC_LOCAL(uint8_t, uTmp2);
5268
5269 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5270 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5271 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5272 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5273
5274 IEM_MC_ADVANCE_RIP_AND_FINISH();
5275 IEM_MC_END();
5276 }
5277 else
5278 {
5279 /*
5280 * We're accessing memory.
5281 */
5282#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5283 IEM_MC_BEGIN(2, 4, 0, 0); \
5284 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5285 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5286 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5287 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5288 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5289 \
5290 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5291 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5292 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5293 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5294 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5295 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5296 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5297 \
5298 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5299 IEM_MC_END()
5300
5301 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5302 {
5303 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5304 }
5305 else
5306 {
5307 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5308 }
5309 }
5310}
5311
5312
5313/**
5314 * @opcode 0x87
5315 */
5316FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5317{
5318 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5320
5321 /*
5322 * If rm is denoting a register, no more instruction bytes.
5323 */
5324 if (IEM_IS_MODRM_REG_MODE(bRm))
5325 {
5326 switch (pVCpu->iem.s.enmEffOpSize)
5327 {
5328 case IEMMODE_16BIT:
5329 IEM_MC_BEGIN(0, 2, 0, 0);
5330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5331 IEM_MC_LOCAL(uint16_t, uTmp1);
5332 IEM_MC_LOCAL(uint16_t, uTmp2);
5333
5334 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5335 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5336 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5337 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5338
5339 IEM_MC_ADVANCE_RIP_AND_FINISH();
5340 IEM_MC_END();
5341 break;
5342
5343 case IEMMODE_32BIT:
5344 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5346 IEM_MC_LOCAL(uint32_t, uTmp1);
5347 IEM_MC_LOCAL(uint32_t, uTmp2);
5348
5349 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5350 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5351 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5352 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5353
5354 IEM_MC_ADVANCE_RIP_AND_FINISH();
5355 IEM_MC_END();
5356 break;
5357
5358 case IEMMODE_64BIT:
5359 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5361 IEM_MC_LOCAL(uint64_t, uTmp1);
5362 IEM_MC_LOCAL(uint64_t, uTmp2);
5363
5364 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5365 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5366 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5367 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5368
5369 IEM_MC_ADVANCE_RIP_AND_FINISH();
5370 IEM_MC_END();
5371 break;
5372
5373 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5374 }
5375 }
5376 else
5377 {
5378 /*
5379 * We're accessing memory.
5380 */
5381#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
5382 do { \
5383 switch (pVCpu->iem.s.enmEffOpSize) \
5384 { \
5385 case IEMMODE_16BIT: \
5386 IEM_MC_BEGIN(2, 4, 0, 0); \
5387 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5388 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5389 IEM_MC_LOCAL(uint16_t, uTmpReg); \
5390 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
5391 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
5392 \
5393 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5394 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5395 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5396 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5397 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
5398 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5399 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5400 \
5401 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5402 IEM_MC_END(); \
5403 break; \
5404 \
5405 case IEMMODE_32BIT: \
5406 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
5407 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5408 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5409 IEM_MC_LOCAL(uint32_t, uTmpReg); \
5410 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
5411 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
5412 \
5413 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5414 IEMOP_HLP_DONE_DECODING(); \
5415 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5416 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5417 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
5418 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5419 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5420 \
5421 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5422 IEM_MC_END(); \
5423 break; \
5424 \
5425 case IEMMODE_64BIT: \
5426 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
5427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5428 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5429 IEM_MC_LOCAL(uint64_t, uTmpReg); \
5430 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
5431 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
5432 \
5433 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5434 IEMOP_HLP_DONE_DECODING(); \
5435 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5436 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5437 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
5438 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5439 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5440 \
5441 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5442 IEM_MC_END(); \
5443 break; \
5444 \
5445 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5446 } \
5447 } while (0)
5448 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5449 {
5450 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
5451 }
5452 else
5453 {
5454 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
5455 }
5456 }
5457}
5458
5459
5460/**
5461 * @opcode 0x88
5462 */
5463FNIEMOP_DEF(iemOp_mov_Eb_Gb)
5464{
5465 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
5466
5467 uint8_t bRm;
5468 IEM_OPCODE_GET_NEXT_U8(&bRm);
5469
5470 /*
5471 * If rm is denoting a register, no more instruction bytes.
5472 */
5473 if (IEM_IS_MODRM_REG_MODE(bRm))
5474 {
5475 IEM_MC_BEGIN(0, 1, 0, 0);
5476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5477 IEM_MC_LOCAL(uint8_t, u8Value);
5478 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5479 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
5480 IEM_MC_ADVANCE_RIP_AND_FINISH();
5481 IEM_MC_END();
5482 }
5483 else
5484 {
5485 /*
5486 * We're writing a register to memory.
5487 */
5488 IEM_MC_BEGIN(0, 2, 0, 0);
5489 IEM_MC_LOCAL(uint8_t, u8Value);
5490 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5493 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5494 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
5495 IEM_MC_ADVANCE_RIP_AND_FINISH();
5496 IEM_MC_END();
5497 }
5498}
5499
5500
5501/**
5502 * @opcode 0x89
5503 */
5504FNIEMOP_DEF(iemOp_mov_Ev_Gv)
5505{
5506 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
5507
5508 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5509
5510 /*
5511 * If rm is denoting a register, no more instruction bytes.
5512 */
5513 if (IEM_IS_MODRM_REG_MODE(bRm))
5514 {
5515 switch (pVCpu->iem.s.enmEffOpSize)
5516 {
5517 case IEMMODE_16BIT:
5518 IEM_MC_BEGIN(0, 1, 0, 0);
5519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5520 IEM_MC_LOCAL(uint16_t, u16Value);
5521 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5522 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5523 IEM_MC_ADVANCE_RIP_AND_FINISH();
5524 IEM_MC_END();
5525 break;
5526
5527 case IEMMODE_32BIT:
5528 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5530 IEM_MC_LOCAL(uint32_t, u32Value);
5531 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5532 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5533 IEM_MC_ADVANCE_RIP_AND_FINISH();
5534 IEM_MC_END();
5535 break;
5536
5537 case IEMMODE_64BIT:
5538 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5540 IEM_MC_LOCAL(uint64_t, u64Value);
5541 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5542 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5543 IEM_MC_ADVANCE_RIP_AND_FINISH();
5544 IEM_MC_END();
5545 break;
5546
5547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5548 }
5549 }
5550 else
5551 {
5552 /*
5553 * We're writing a register to memory.
5554 */
5555 switch (pVCpu->iem.s.enmEffOpSize)
5556 {
5557 case IEMMODE_16BIT:
5558 IEM_MC_BEGIN(0, 2, 0, 0);
5559 IEM_MC_LOCAL(uint16_t, u16Value);
5560 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5563 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5564 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5565 IEM_MC_ADVANCE_RIP_AND_FINISH();
5566 IEM_MC_END();
5567 break;
5568
5569 case IEMMODE_32BIT:
5570 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5571 IEM_MC_LOCAL(uint32_t, u32Value);
5572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5575 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5576 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
5577 IEM_MC_ADVANCE_RIP_AND_FINISH();
5578 IEM_MC_END();
5579 break;
5580
5581 case IEMMODE_64BIT:
5582 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5583 IEM_MC_LOCAL(uint64_t, u64Value);
5584 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5587 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
5588 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
5589 IEM_MC_ADVANCE_RIP_AND_FINISH();
5590 IEM_MC_END();
5591 break;
5592
5593 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5594 }
5595 }
5596}
5597
5598
5599/**
5600 * @opcode 0x8a
5601 */
5602FNIEMOP_DEF(iemOp_mov_Gb_Eb)
5603{
5604 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
5605
5606 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5607
5608 /*
5609 * If rm is denoting a register, no more instruction bytes.
5610 */
5611 if (IEM_IS_MODRM_REG_MODE(bRm))
5612 {
5613 IEM_MC_BEGIN(0, 1, 0, 0);
5614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5615 IEM_MC_LOCAL(uint8_t, u8Value);
5616 IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5617 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5618 IEM_MC_ADVANCE_RIP_AND_FINISH();
5619 IEM_MC_END();
5620 }
5621 else
5622 {
5623 /*
5624 * We're loading a register from memory.
5625 */
5626 IEM_MC_BEGIN(0, 2, 0, 0);
5627 IEM_MC_LOCAL(uint8_t, u8Value);
5628 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5631 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5632 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
5633 IEM_MC_ADVANCE_RIP_AND_FINISH();
5634 IEM_MC_END();
5635 }
5636}
5637
5638
5639/**
5640 * @opcode 0x8b
5641 */
5642FNIEMOP_DEF(iemOp_mov_Gv_Ev)
5643{
5644 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
5645
5646 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5647
5648 /*
5649 * If rm is denoting a register, no more instruction bytes.
5650 */
5651 if (IEM_IS_MODRM_REG_MODE(bRm))
5652 {
5653 switch (pVCpu->iem.s.enmEffOpSize)
5654 {
5655 case IEMMODE_16BIT:
5656 IEM_MC_BEGIN(0, 1, 0, 0);
5657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5658 IEM_MC_LOCAL(uint16_t, u16Value);
5659 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5660 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5661 IEM_MC_ADVANCE_RIP_AND_FINISH();
5662 IEM_MC_END();
5663 break;
5664
5665 case IEMMODE_32BIT:
5666 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5668 IEM_MC_LOCAL(uint32_t, u32Value);
5669 IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5670 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5671 IEM_MC_ADVANCE_RIP_AND_FINISH();
5672 IEM_MC_END();
5673 break;
5674
5675 case IEMMODE_64BIT:
5676 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5678 IEM_MC_LOCAL(uint64_t, u64Value);
5679 IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
5680 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5681 IEM_MC_ADVANCE_RIP_AND_FINISH();
5682 IEM_MC_END();
5683 break;
5684
5685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5686 }
5687 }
5688 else
5689 {
5690 /*
5691 * We're loading a register from memory.
5692 */
5693 switch (pVCpu->iem.s.enmEffOpSize)
5694 {
5695 case IEMMODE_16BIT:
5696 IEM_MC_BEGIN(0, 2, 0, 0);
5697 IEM_MC_LOCAL(uint16_t, u16Value);
5698 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5699 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5700 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5701 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5702 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
5703 IEM_MC_ADVANCE_RIP_AND_FINISH();
5704 IEM_MC_END();
5705 break;
5706
5707 case IEMMODE_32BIT:
5708 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5709 IEM_MC_LOCAL(uint32_t, u32Value);
5710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5711 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5713 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5714 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
5715 IEM_MC_ADVANCE_RIP_AND_FINISH();
5716 IEM_MC_END();
5717 break;
5718
5719 case IEMMODE_64BIT:
5720 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5721 IEM_MC_LOCAL(uint64_t, u64Value);
5722 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5723 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5725 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
5726 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
5727 IEM_MC_ADVANCE_RIP_AND_FINISH();
5728 IEM_MC_END();
5729 break;
5730
5731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5732 }
5733 }
5734}
5735
5736
5737/**
5738 * opcode 0x63
5739 * @todo Table fixme
5740 */
5741FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5742{
5743 if (!IEM_IS_64BIT_CODE(pVCpu))
5744 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5745 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5746 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5747 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5748}
5749
5750
5751/**
5752 * @opcode 0x8c
5753 */
5754FNIEMOP_DEF(iemOp_mov_Ev_Sw)
5755{
5756 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
5757
5758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5759
5760 /*
5761 * Check that the destination register exists. The REX.R prefix is ignored.
5762 */
5763 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5764 if (iSegReg > X86_SREG_GS)
5765 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5766
5767 /*
5768 * If rm is denoting a register, no more instruction bytes.
5769 * In that case, the operand size is respected and the upper bits are
5770 * cleared (starting with some pentium).
5771 */
5772 if (IEM_IS_MODRM_REG_MODE(bRm))
5773 {
5774 switch (pVCpu->iem.s.enmEffOpSize)
5775 {
5776 case IEMMODE_16BIT:
5777 IEM_MC_BEGIN(0, 1, 0, 0);
5778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5779 IEM_MC_LOCAL(uint16_t, u16Value);
5780 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5781 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
5782 IEM_MC_ADVANCE_RIP_AND_FINISH();
5783 IEM_MC_END();
5784 break;
5785
5786 case IEMMODE_32BIT:
5787 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
5788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5789 IEM_MC_LOCAL(uint32_t, u32Value);
5790 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
5791 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
5792 IEM_MC_ADVANCE_RIP_AND_FINISH();
5793 IEM_MC_END();
5794 break;
5795
5796 case IEMMODE_64BIT:
5797 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5799 IEM_MC_LOCAL(uint64_t, u64Value);
5800 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
5801 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
5802 IEM_MC_ADVANCE_RIP_AND_FINISH();
5803 IEM_MC_END();
5804 break;
5805
5806 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5807 }
5808 }
5809 else
5810 {
5811 /*
5812 * We're saving the register to memory. The access is word sized
5813 * regardless of operand size prefixes.
5814 */
5815#if 0 /* not necessary */
5816 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5817#endif
5818 IEM_MC_BEGIN(0, 2, 0, 0);
5819 IEM_MC_LOCAL(uint16_t, u16Value);
5820 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5821 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
5822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5823 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
5824 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
5825 IEM_MC_ADVANCE_RIP_AND_FINISH();
5826 IEM_MC_END();
5827 }
5828}
5829
5830
5831
5832
5833/**
5834 * @opcode 0x8d
5835 */
5836FNIEMOP_DEF(iemOp_lea_Gv_M)
5837{
5838 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
5839 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5840 if (IEM_IS_MODRM_REG_MODE(bRm))
5841 IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */
5842
5843 switch (pVCpu->iem.s.enmEffOpSize)
5844 {
5845 case IEMMODE_16BIT:
5846 IEM_MC_BEGIN(0, 2, 0, 0);
5847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5850 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
5851 * operand-size, which is usually the case. It'll save an instruction
5852 * and a register. */
5853 IEM_MC_LOCAL(uint16_t, u16Cast);
5854 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
5855 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
5856 IEM_MC_ADVANCE_RIP_AND_FINISH();
5857 IEM_MC_END();
5858 break;
5859
5860 case IEMMODE_32BIT:
5861 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5862 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5863 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5864 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5865 /** @todo optimize: This value casting/masking can be skipped if addr-size ==
5866 * operand-size, which is usually the case. It'll save an instruction
5867 * and a register. */
5868 IEM_MC_LOCAL(uint32_t, u32Cast);
5869 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
5870 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
5871 IEM_MC_ADVANCE_RIP_AND_FINISH();
5872 IEM_MC_END();
5873 break;
5874
5875 case IEMMODE_64BIT:
5876 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
5877 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
5878 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
5879 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5880 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
5881 IEM_MC_ADVANCE_RIP_AND_FINISH();
5882 IEM_MC_END();
5883 break;
5884
5885 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5886 }
5887}
5888
5889
/**
 * @opcode 0x8e
 *
 * mov Sw,Ev - load a segment register from a 16-bit GPR or a word in memory.
 * CS is not a valid destination and raises \#UD; the operand size prefix is
 * effectively ignored (the access is always 16-bit).
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The CIMPL clobber mask covers the entire hidden segment register
           state (selector, base, limit, attributes) of the target register. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS inhibits interrupts/traps on the next instruction
               (IEM_CIMPL_F_INHIBIT_SHADOW). */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            /* FS/GS loads, and any load outside 32-bit code, cannot change
               the execution mode, so no extra CIMPL flags are needed. */
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t,      u16Value,          1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                  RT_BIT_64(kIemNativeGstReg_SegSelFirst    + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst   + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst  + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        /* Same CIMPL-flag selection logic as the register form above. */
        if (iSegReg == X86_SREG_SS)
        {
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
6005
6006
/** Opcode 0x8f /0.
 *
 * pop Ev - pop a word/dword/qword off the stack into a GPR or memory
 * location.  The memory form is tricky because Intel specifies that rSP is
 * incremented before the effective address is calculated. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                           1);
            /* '2 << 8' passes the pop size as an rSP bias for the EA calc,
               implementing the "RSP incremented first" rule. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     pVCpu->iem.s.iEffSeg,     0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     pVCpu->iem.s.iEffSeg,     0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR,        GCPtrEffDst,                           1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t,  iEffSeg,     pVCpu->iem.s.iEffSeg,     0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Everything worked - now commit the new RSP and advance RIP. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6138
6139
/**
 * @opcode 0x8f
 *
 * Group 1A dispatcher: modrm.reg == 0 is pop Ev, while /1 thru /7 form the
 * AMD XOP prefix (three-byte VEX-like escape).
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* Like VEX, XOP is invalid when combined with 66/F2/F3/LOCK/REX. */
        if (   (   pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* XOP encodes R/X/B/vvvv inverted, hence the '~' below. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6202
6203
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges the given general register (REX.B is applied here) with
 * AX/EAX/RAX according to the effective operand size.  No flags are
 * modified and, unlike the memory form of xchg, no locking is involved.
 *
 * @param   iReg    The register index before REX.B adjustment.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg,         u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg,         u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg,         u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6254
6255
/**
 * @opcode 0x90
 *
 * Opcode 0x90 is three instructions in one: plain nop, 'pause' when an F3
 * (rep) prefix is present, and 'xchg r8,rAX' when REX.B is set.
 */
FNIEMOP_DEF(iemOp_nop)
{
    /* R8/R8D and RAX/EAX can be exchanged. */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
    {
        IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
        return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
    }

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
    {
        IEMOP_MNEMONIC(pause, "pause");
        /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
           and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
        /* Pause may trigger a nested-guest VM-exit; outside nested HW-virt it
           falls through and behaves like a nop. */
        if (!IEM_IS_IN_GUEST(pVCpu))
        { /* probable */ }
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
#endif
#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
        else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
#endif
    }
    else
        IEMOP_MNEMONIC(nop, "nop");
    /** @todo testcase: lock nop; lock pause */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6292
6293
/**
 * @opcode 0x91
 *
 * xchg rCX,rAX - all the work is done by the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6302
6303
/**
 * @opcode 0x92
 *
 * xchg rDX,rAX - all the work is done by the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6312
6313
/**
 * @opcode 0x93
 *
 * xchg rBX,rAX - all the work is done by the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6322
6323
6324/**
6325 * @opcode 0x94
6326 */
6327FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6328{
6329 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6330 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6331}
6332
6333
/**
 * @opcode 0x95
 *
 * xchg rBP,rAX - all the work is done by the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6342
6343
/**
 * @opcode 0x96
 *
 * xchg rSI,rAX - all the work is done by the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6352
6353
/**
 * @opcode 0x97
 *
 * xchg rDI,rAX - all the work is done by the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6362
6363
/**
 * @opcode 0x98
 *
 * cbw/cwde/cdqe - sign-extend AL into AX, AX into EAX, or EAX into RAX,
 * depending on the effective operand size.  Implemented by testing the
 * sign bit and then OR-ing in ones or AND-ing them out.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6413
6414
/**
 * @opcode 0x99
 *
 * cwd/cdq/cqo - sign-extend the accumulator into DX/EDX/RDX, i.e. the
 * destination register gets all-ones or all-zeros depending on the sign
 * bit of AX/EAX/RAX.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6464
6465
/**
 * @opcode 0x9a
 *
 * call Ap - direct far call with an immediate far pointer (sel:off).
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    /* Offset is 16 or 32 bits depending on operand size; selector is always 16 bits. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6487
6488
/** Opcode 0x9b. (aka fwait)
 *
 * Checks for pending x87 FPU exceptions (raising \#NM or \#MF as
 * appropriate) and otherwise does nothing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6500
6501
/**
 * @opcode 0x9c
 *
 * pushf Fv - push the flags register.  Deferred entirely to the C
 * implementation since the behavior is mode and IOPL dependent; rSP is the
 * only guest register the CIMPL clobbers from the recompiler's viewpoint.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6513
6514
/**
 * @opcode 0x9d
 *
 * popf Fv - pop the flags register.  Deferred to the C implementation; it
 * modifies RFLAGS (hence IEM_CIMPL_F_RFLAGS) and may unmask interrupts, so
 * pending IRQs are checked both before and after.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6527
6528
/**
 * @opcode 0x9e
 * @opflmodify cf,pf,af,zf,sf
 *
 * sahf - store AH into the low byte of EFLAGS.  Only SF/ZF/AF/PF/CF are
 * taken from AH; the reserved bit 1 is forced to one.  In 64-bit mode the
 * instruction is only valid when CPUID reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* AH is GPR 4 (xSP) when there is no REX prefix. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6553
6554
/**
 * @opcode 0x9f
 * @opfltest cf,pf,af,zf,sf
 *
 * lahf - load AH from the low byte of EFLAGS.  In 64-bit mode the
 * instruction is only valid when CPUID reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* AH is GPR 4 (xSP) when there is no REX prefix. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6573
6574
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * Will return/throw on failures.
 *
 * The immediate offset width follows the effective address size (16, 32 or
 * 64 bits) and is zero-extended into the 64-bit destination.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6598
/**
 * @opcode 0xa0
 *
 * mov AL,Ob - load AL from a memory byte at an immediate offset (moffs8)
 * within the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6623
6624
/**
 * @opcode 0xa1
 *
 * mov rAX,Ov - load AX/EAX/RAX from memory at an immediate offset (moffs)
 * within the effective segment; one case per effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6678
6679
/**
 * @opcode 0xa2
 *
 * mov Ob,AL - store AL to a memory byte at an immediate offset (moffs8)
 * within the effective segment.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6704
6705
/**
 * @opcode 0xa3
 *
 * mov Ov,rAX - store AX/EAX/RAX to memory at an immediate offset (moffs)
 * within the effective segment; one case per effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6759
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits the microcode for a single (non-rep) movs iteration: load ValBits
 * bits from DS(or override):rSI, store them to ES:rDI, then advance or
 * retreat both index registers by the element size according to EFLAGS.DF.
 * AddrBits selects which width of rSI/rDI is used (zero-extended to 64-bit
 * for the address). */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6779
/**
 * @opcode 0xa4
 * @opfltest df
 *
 * movsb Xb,Yb - copy a byte from DS(or override):rSI to ES:rDI.  With a
 * rep/repne prefix the whole loop is deferred to a C implementation; a
 * single iteration uses the shared IEM_MOVS_CASE microcode.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* The rep CIMPL clobbers rSI, rDI and rCX for all address sizes. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                              RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                              RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                              RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6829
6830
/**
 * @opcode 0xa5
 * @opfltest df
 *
 * movsw/movsd/movsq Xv,Yv - copy a word/dword/qword from
 * DS(or override):rSI to ES:rDI.  With a rep/repne prefix the loop is
 * deferred to the matching op-size x addr-size C implementation; a single
 * iteration uses the shared IEM_MOVS_CASE microcode.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Every rep CIMPL variant clobbers rSI, rDI and rCX; all inner cases
           return, so no breaks are needed after the inner switches. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 64-bit op size with 16-bit addressing cannot be encoded */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6955
6956#undef IEM_MOVS_CASE
6957
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv for the forms
 * without a repeat prefix.
 *
 * Fetches a ValBits wide operand from [iEffSeg:rSI] and a second one from
 * [ES:rDI], compares them with the regular CMP assembly helper (only EFLAGS
 * is written; the first operand is passed by reference but not modified by
 * CMP), and finally advances or retreats both rSI and rDI by the operand
 * byte width as directed by EFLAGS.DF.
 *
 * @param   ValBits     The operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    The effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags to pass to IEM_MC_BEGIN.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr1); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr2); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
    \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6987
6988/**
6989 * @opcode 0xa6
6990 * @opflclass arithmetic
6991 * @opfltest df
6992 */
6993FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6994{
6995
6996 /*
6997 * Use the C implementation if a repeat prefix is encountered.
6998 */
6999 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7000 {
7001 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
7002 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7003 switch (pVCpu->iem.s.enmEffAddrMode)
7004 {
7005 case IEMMODE_16BIT:
7006 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7007 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7008 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7009 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7010 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7011 case IEMMODE_32BIT:
7012 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7013 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7014 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7015 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7016 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7017 case IEMMODE_64BIT:
7018 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7019 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7020 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7021 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7022 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7023 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7024 }
7025 }
7026 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7027 {
7028 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7030 switch (pVCpu->iem.s.enmEffAddrMode)
7031 {
7032 case IEMMODE_16BIT:
7033 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7034 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7035 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7036 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7037 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7038 case IEMMODE_32BIT:
7039 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7040 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7041 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7042 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7043 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7044 case IEMMODE_64BIT:
7045 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7046 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7047 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7048 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7049 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7050 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7051 }
7052 }
7053
7054 /*
7055 * Sharing case implementation with cmps[wdq] below.
7056 */
7057 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7058 switch (pVCpu->iem.s.enmEffAddrMode)
7059 {
7060 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7061 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7062 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7064 }
7065}
7066
7067
7068/**
7069 * @opcode 0xa7
7070 * @opflclass arithmetic
7071 * @opfltest df
7072 */
7073FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7074{
7075 /*
7076 * Use the C implementation if a repeat prefix is encountered.
7077 */
7078 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7079 {
7080 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7081 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7082 switch (pVCpu->iem.s.enmEffOpSize)
7083 {
7084 case IEMMODE_16BIT:
7085 switch (pVCpu->iem.s.enmEffAddrMode)
7086 {
7087 case IEMMODE_16BIT:
7088 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7089 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7090 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7091 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7092 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7093 case IEMMODE_32BIT:
7094 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7095 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7096 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7097 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7098 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7099 case IEMMODE_64BIT:
7100 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7101 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7102 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7103 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7104 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7106 }
7107 break;
7108 case IEMMODE_32BIT:
7109 switch (pVCpu->iem.s.enmEffAddrMode)
7110 {
7111 case IEMMODE_16BIT:
7112 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7113 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7114 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7115 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7116 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7117 case IEMMODE_32BIT:
7118 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7119 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7120 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7121 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7122 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7123 case IEMMODE_64BIT:
7124 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7125 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7126 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7127 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7128 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7129 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7130 }
7131 case IEMMODE_64BIT:
7132 switch (pVCpu->iem.s.enmEffAddrMode)
7133 {
7134 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7135 case IEMMODE_32BIT:
7136 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7137 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7138 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7139 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7140 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7141 case IEMMODE_64BIT:
7142 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7143 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7144 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7145 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7146 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7147 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7148 }
7149 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7150 }
7151 }
7152
7153 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7154 {
7155 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7157 switch (pVCpu->iem.s.enmEffOpSize)
7158 {
7159 case IEMMODE_16BIT:
7160 switch (pVCpu->iem.s.enmEffAddrMode)
7161 {
7162 case IEMMODE_16BIT:
7163 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7164 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7165 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7166 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7167 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7168 case IEMMODE_32BIT:
7169 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7170 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7171 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7172 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7173 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7174 case IEMMODE_64BIT:
7175 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7176 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7177 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7178 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7179 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7180 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7181 }
7182 break;
7183 case IEMMODE_32BIT:
7184 switch (pVCpu->iem.s.enmEffAddrMode)
7185 {
7186 case IEMMODE_16BIT:
7187 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7188 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7189 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7190 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7191 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7192 case IEMMODE_32BIT:
7193 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7194 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7195 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7196 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7197 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7198 case IEMMODE_64BIT:
7199 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7200 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7201 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7202 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7203 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7205 }
7206 case IEMMODE_64BIT:
7207 switch (pVCpu->iem.s.enmEffAddrMode)
7208 {
7209 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7210 case IEMMODE_32BIT:
7211 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7212 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7213 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7214 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7215 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7216 case IEMMODE_64BIT:
7217 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7218 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7219 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7220 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7221 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7223 }
7224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7225 }
7226 }
7227
7228 /*
7229 * Annoying double switch here.
7230 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7231 */
7232 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7233 switch (pVCpu->iem.s.enmEffOpSize)
7234 {
7235 case IEMMODE_16BIT:
7236 switch (pVCpu->iem.s.enmEffAddrMode)
7237 {
7238 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7239 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7240 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7241 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7242 }
7243 break;
7244
7245 case IEMMODE_32BIT:
7246 switch (pVCpu->iem.s.enmEffAddrMode)
7247 {
7248 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7249 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7250 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7252 }
7253 break;
7254
7255 case IEMMODE_64BIT:
7256 switch (pVCpu->iem.s.enmEffAddrMode)
7257 {
7258 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7259 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7260 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7261 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7262 }
7263 break;
7264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7265 }
7266}
7267
7268#undef IEM_CMPS_CASE
7269
7270/**
7271 * @opcode 0xa8
7272 * @opflclass logical
7273 */
7274FNIEMOP_DEF(iemOp_test_AL_Ib)
7275{
7276 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7277 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7278 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
7279}
7280
7281
7282/**
7283 * @opcode 0xa9
7284 * @opflclass logical
7285 */
7286FNIEMOP_DEF(iemOp_test_eAX_Iz)
7287{
7288 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7289 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7290 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
7291}
7292
7293
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for the forms
 * without a repeat prefix.
 *
 * Stores al/ax/eax/rax to [ES:rDI] and then advances or retreats rDI by the
 * operand byte width as directed by EFLAGS.DF.  EFLAGS is otherwise
 * untouched.
 *
 * @param   ValBits     The operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    The effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags to pass to IEM_MC_BEGIN.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7310
7311/**
7312 * @opcode 0xaa
7313 */
7314FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7315{
7316 /*
7317 * Use the C implementation if a repeat prefix is encountered.
7318 */
7319 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7320 {
7321 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7322 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7323 switch (pVCpu->iem.s.enmEffAddrMode)
7324 {
7325 case IEMMODE_16BIT:
7326 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7327 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7328 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7329 iemCImpl_stos_al_m16);
7330 case IEMMODE_32BIT:
7331 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7332 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7333 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7334 iemCImpl_stos_al_m32);
7335 case IEMMODE_64BIT:
7336 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7337 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7338 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7339 iemCImpl_stos_al_m64);
7340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7341 }
7342 }
7343
7344 /*
7345 * Sharing case implementation with stos[wdq] below.
7346 */
7347 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7348 switch (pVCpu->iem.s.enmEffAddrMode)
7349 {
7350 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7351 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7352 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7353 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7354 }
7355}
7356
7357
7358/**
7359 * @opcode 0xab
7360 */
7361FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7362{
7363 /*
7364 * Use the C implementation if a repeat prefix is encountered.
7365 */
7366 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7367 {
7368 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7370 switch (pVCpu->iem.s.enmEffOpSize)
7371 {
7372 case IEMMODE_16BIT:
7373 switch (pVCpu->iem.s.enmEffAddrMode)
7374 {
7375 case IEMMODE_16BIT:
7376 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7377 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7378 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7379 iemCImpl_stos_ax_m16);
7380 case IEMMODE_32BIT:
7381 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7382 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7383 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7384 iemCImpl_stos_ax_m32);
7385 case IEMMODE_64BIT:
7386 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7387 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7388 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7389 iemCImpl_stos_ax_m64);
7390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7391 }
7392 break;
7393 case IEMMODE_32BIT:
7394 switch (pVCpu->iem.s.enmEffAddrMode)
7395 {
7396 case IEMMODE_16BIT:
7397 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7398 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7399 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7400 iemCImpl_stos_eax_m16);
7401 case IEMMODE_32BIT:
7402 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7403 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7404 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7405 iemCImpl_stos_eax_m32);
7406 case IEMMODE_64BIT:
7407 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7408 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7409 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7410 iemCImpl_stos_eax_m64);
7411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7412 }
7413 case IEMMODE_64BIT:
7414 switch (pVCpu->iem.s.enmEffAddrMode)
7415 {
7416 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7417 case IEMMODE_32BIT:
7418 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7419 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7420 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7421 iemCImpl_stos_rax_m32);
7422 case IEMMODE_64BIT:
7423 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7424 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7425 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7426 iemCImpl_stos_rax_m64);
7427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7428 }
7429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7430 }
7431 }
7432
7433 /*
7434 * Annoying double switch here.
7435 * Using ugly macro for implementing the cases, sharing it with stosb.
7436 */
7437 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7438 switch (pVCpu->iem.s.enmEffOpSize)
7439 {
7440 case IEMMODE_16BIT:
7441 switch (pVCpu->iem.s.enmEffAddrMode)
7442 {
7443 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7444 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7445 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7446 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7447 }
7448 break;
7449
7450 case IEMMODE_32BIT:
7451 switch (pVCpu->iem.s.enmEffAddrMode)
7452 {
7453 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7454 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7455 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7456 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7457 }
7458 break;
7459
7460 case IEMMODE_64BIT:
7461 switch (pVCpu->iem.s.enmEffAddrMode)
7462 {
7463 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7464 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7465 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7466 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7467 }
7468 break;
7469 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7470 }
7471}
7472
7473#undef IEM_STOS_CASE
7474
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for the forms
 * without a repeat prefix.
 *
 * Loads a ValBits wide value from [iEffSeg:rSI] into al/ax/eax/rax and then
 * advances or retreats rSI by the operand byte width as directed by
 * EFLAGS.DF.  EFLAGS is otherwise untouched.
 *
 * @param   ValBits     The operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    The effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags to pass to IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7491
7492/**
7493 * @opcode 0xac
7494 * @opfltest df
7495 */
7496FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7497{
7498 /*
7499 * Use the C implementation if a repeat prefix is encountered.
7500 */
7501 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7502 {
7503 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7505 switch (pVCpu->iem.s.enmEffAddrMode)
7506 {
7507 case IEMMODE_16BIT:
7508 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7509 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7510 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7511 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7512 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7513 case IEMMODE_32BIT:
7514 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7515 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7516 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7517 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7518 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7519 case IEMMODE_64BIT:
7520 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7521 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7522 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7523 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7524 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7526 }
7527 }
7528
7529 /*
7530 * Sharing case implementation with stos[wdq] below.
7531 */
7532 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7533 switch (pVCpu->iem.s.enmEffAddrMode)
7534 {
7535 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7536 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7537 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7538 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7539 }
7540}
7541
7542
7543/**
7544 * @opcode 0xad
7545 * @opfltest df
7546 */
7547FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7548{
7549 /*
7550 * Use the C implementation if a repeat prefix is encountered.
7551 */
7552 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7553 {
7554 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7556 switch (pVCpu->iem.s.enmEffOpSize)
7557 {
7558 case IEMMODE_16BIT:
7559 switch (pVCpu->iem.s.enmEffAddrMode)
7560 {
7561 case IEMMODE_16BIT:
7562 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7563 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7564 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7565 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7566 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7567 case IEMMODE_32BIT:
7568 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7569 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7570 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7571 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7572 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7573 case IEMMODE_64BIT:
7574 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7575 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7576 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7577 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7578 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7579 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7580 }
7581 break;
7582 case IEMMODE_32BIT:
7583 switch (pVCpu->iem.s.enmEffAddrMode)
7584 {
7585 case IEMMODE_16BIT:
7586 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7587 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7588 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7589 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7590 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7591 case IEMMODE_32BIT:
7592 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7593 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7594 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7595 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7596 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7597 case IEMMODE_64BIT:
7598 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7599 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7600 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7601 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7602 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7603 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7604 }
7605 case IEMMODE_64BIT:
7606 switch (pVCpu->iem.s.enmEffAddrMode)
7607 {
7608 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7609 case IEMMODE_32BIT:
7610 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7611 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7612 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7613 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7614 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7615 case IEMMODE_64BIT:
7616 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7617 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7618 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7619 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7620 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7621 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7622 }
7623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7624 }
7625 }
7626
7627 /*
7628 * Annoying double switch here.
7629 * Using ugly macro for implementing the cases, sharing it with lodsb.
7630 */
7631 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7632 switch (pVCpu->iem.s.enmEffOpSize)
7633 {
7634 case IEMMODE_16BIT:
7635 switch (pVCpu->iem.s.enmEffAddrMode)
7636 {
7637 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7638 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7639 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7640 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7641 }
7642 break;
7643
7644 case IEMMODE_32BIT:
7645 switch (pVCpu->iem.s.enmEffAddrMode)
7646 {
7647 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7648 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7649 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7650 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7651 }
7652 break;
7653
7654 case IEMMODE_64BIT:
7655 switch (pVCpu->iem.s.enmEffAddrMode)
7656 {
7657 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7658 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7659 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7660 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7661 }
7662 break;
7663 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7664 }
7665}
7666
7667#undef IEM_LODS_CASE
7668
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for the forms
 * without a repeat prefix.
 *
 * Fetches a ValBits wide value from [ES:rDI], compares it against
 * al/ax/eax/rax with the regular CMP assembly helper (only EFLAGS is
 * written), and then advances or retreats rDI by the operand byte width as
 * directed by EFLAGS.DF.
 *
 * @param   ValBits     The operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    The effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags to pass to IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7691
7692/**
7693 * @opcode 0xae
7694 * @opflclass arithmetic
7695 * @opfltest df
7696 */
7697FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7698{
7699 /*
7700 * Use the C implementation if a repeat prefix is encountered.
7701 */
7702 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7703 {
7704 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7706 switch (pVCpu->iem.s.enmEffAddrMode)
7707 {
7708 case IEMMODE_16BIT:
7709 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7710 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7711 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7712 iemCImpl_repe_scas_al_m16);
7713 case IEMMODE_32BIT:
7714 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7715 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7716 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7717 iemCImpl_repe_scas_al_m32);
7718 case IEMMODE_64BIT:
7719 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7720 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7721 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7722 iemCImpl_repe_scas_al_m64);
7723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7724 }
7725 }
7726 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7727 {
7728 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7730 switch (pVCpu->iem.s.enmEffAddrMode)
7731 {
7732 case IEMMODE_16BIT:
7733 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7734 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7735 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7736 iemCImpl_repne_scas_al_m16);
7737 case IEMMODE_32BIT:
7738 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7739 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7740 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7741 iemCImpl_repne_scas_al_m32);
7742 case IEMMODE_64BIT:
7743 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7744 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7745 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7746 iemCImpl_repne_scas_al_m64);
7747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7748 }
7749 }
7750
7751 /*
7752 * Sharing case implementation with stos[wdq] below.
7753 */
7754 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7755 switch (pVCpu->iem.s.enmEffAddrMode)
7756 {
7757 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7758 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7759 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7761 }
7762}
7763
7764
/**
 * @opcode 0xaf
 * @opflclass arithmetic
 * @opfltest df
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    /*
     * SCAS rAX,Xv: compare AX/EAX/RAX with the es:[xDI] operand.
     *
     * Use the C implementation if a repeat prefix is encountered; the REP
     * forms loop, updating xDI and xCX, which is why the deferral passes a
     * mask naming those two GPRs (presumably so the native recompiler knows
     * which guest registers the C helper may modify -- see
     * IEM_MC_DEFER_TO_CIMPL_0_RET).
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Outer switch on operand size, inner switch on address size; every
           inner case returns, so no breaks are actually reachable. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, all cases above return */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this assert right? In 64-bit mode the 0x67 prefix selects 32-bit addressing, so 16-bit addressing shouldn't be encodable here -- verify. */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Same operand-size x address-size double switch as the REPE branch,
           just deferring to the repne variants of the C helpers. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break: unreachable, all cases above return */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7947
7948#undef IEM_SCAS_CASE
7949
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the immediate byte and stores it into the fixed 8-bit register
 * given by @a iFixedReg (the callers have already OR'ed in REX.B).  Note that
 * indexes 4-7 presumably resolve to AH/CH/DH/BH when no REX prefix is present
 * -- that mapping lives in the GREG_U8 accessors, not here.
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7962
7963
/**
 * @opcode 0xb0
 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    /* 'mov AL,Ib' -- register fixed by the opcode; REX.B extends it to R8B. */
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7972
7973
/**
 * @opcode 0xb1
 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    /* 'mov CL,Ib' -- REX.B extends the target to R9B.  (Function name lacks
       the mov_ prefix of its 0xb0/0xb4 siblings; kept as-is since it is
       referenced from the opcode dispatch table.) */
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7982
7983
/**
 * @opcode 0xb2
 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    /* 'mov DL,Ib' -- REX.B extends the target to R10B. */
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7992
7993
/**
 * @opcode 0xb3
 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    /* 'mov BL,Ib' -- REX.B extends the target to R11B. */
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
8002
8003
/**
 * @opcode 0xb4
 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    /* 'mov AH,Ib' -- register index 4; presumably resolved to AH (no REX) or
       SPL/R12B (with REX) by the GREG_U8 store -- that logic isn't here. */
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
8012
8013
/**
 * @opcode 0xb5
 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    /* 'mov CH,Ib' -- register index 5; CH without REX, BPL/R13B with. */
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
8022
8023
/**
 * @opcode 0xb6
 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    /* 'mov DH,Ib' -- register index 6; DH without REX, SIL/R14B with. */
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
8032
8033
/**
 * @opcode 0xb7
 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    /* 'mov BH,Ib' -- register index 7; BH without REX, DIL/R15B with. */
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8042
8043
/**
 * Common 'mov regX,immX' helper.
 *
 * Stores an operand-size immediate into the fixed register @a iFixedReg
 * (callers OR in REX.B).  The 64-bit form takes a full 64-bit immediate --
 * the only instruction with one.  The immediate is fetched after
 * IEM_MC_BEGIN, matching the order the recompiler expects.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* Note: one local here (vs none above) -- presumably needed for the
               64-bit immediate; confirm against the IEM_MC_BEGIN contract. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8080
8081
/**
 * @opcode 0xb8
 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    /* 'mov rAX,Iv' -- REX.B extends the target to R8. */
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
8090
8091
/**
 * @opcode 0xb9
 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    /* 'mov rCX,Iv' -- REX.B extends the target to R9. */
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
8100
8101
/**
 * @opcode 0xba
 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    /* 'mov rDX,Iv' -- REX.B extends the target to R10. */
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
8110
8111
/**
 * @opcode 0xbb
 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    /* 'mov rBX,Iv' -- REX.B extends the target to R11. */
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
8120
8121
/**
 * @opcode 0xbc
 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    /* 'mov rSP,Iv' -- REX.B extends the target to R12. */
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
8130
8131
/**
 * @opcode 0xbd
 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    /* 'mov rBP,Iv' -- REX.B extends the target to R13. */
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
8140
8141
/**
 * @opcode 0xbe
 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    /* 'mov rSI,Iv' -- REX.B extends the target to R14. */
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
8150
8151
/**
 * @opcode 0xbf
 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    /* 'mov rDI,Iv' -- REX.B extends the target to R15. */
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8160
8161
/**
 * @opcode 0xc0
 *
 * Group 2 byte-operand shift/rotate with an immediate count: the ModR/M reg
 * field selects rol/ror/rcl/rcr/shl/shr/sar (/6 is undefined).  186+ only.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh. */
#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
        \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }

        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
#undef GRP2_BODY_Eb_Ib
}
8314
8315
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.
   Expects 'bRm' in scope at the expansion site.  In the memory case the Ib
   shift count is fetched after effective-address calculation; the '1' passed
   to IEM_MC_CALC_RM_EFF_ADDR presumably accounts for that trailing immediate
   byte -- verify against the macro's contract. */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0); \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
8446
/**
 * @opmaps grp2_c1
 * @opcode /0
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
{
    /* 'rol Ev,Ib' -- body macro fetches Ib and dispatches on operand size. */
    IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
8457
8458
/**
 * @opmaps grp2_c1
 * @opcode /1
 * @opflclass rotate_count
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
{
    /* 'ror Ev,Ib' -- body macro fetches Ib and dispatches on operand size. */
    IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
8469
8470
/**
 * @opmaps grp2_c1
 * @opcode /2
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
{
    /* 'rcl Ev,Ib' -- rotate through carry; body macro handles the rest. */
    IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
8481
8482
/**
 * @opmaps grp2_c1
 * @opcode /3
 * @opflclass rotate_carry_count
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
{
    /* 'rcr Ev,Ib' -- rotate through carry; body macro handles the rest. */
    IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
8493
8494
/**
 * @opmaps grp2_c1
 * @opcode /4
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
{
    /* 'shl Ev,Ib' -- body macro fetches Ib and dispatches on operand size. */
    IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
8505
8506
/**
 * @opmaps grp2_c1
 * @opcode /5
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
{
    /* 'shr Ev,Ib' -- body macro fetches Ib and dispatches on operand size. */
    IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
8517
8518
/**
 * @opmaps grp2_c1
 * @opcode /7
 * @opflclass shift_count
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
{
    /* 'sar Ev,Ib' -- body macro fetches Ib and dispatches on operand size. */
    IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
8529
8530#undef GRP2_BODY_Ev_Ib
8531
/**
 * @opcode 0xc1
 *
 * Group 2 Ev,Ib dispatcher: routes on the ModR/M reg field to the individual
 * shift/rotate workers above (/6 is undefined and raises \#UD).  186+ only.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
}
8553
8554
/**
 * @opcode 0xc2
 *
 * Near return popping Iw extra bytes off the stack.  Deferred to C helpers
 * per operand size; the flags mark it as an indirect branch that touches the
 * stack, and the mask names xSP as a GPR the helper modifies.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8578
8579
/**
 * @opcode 0xc3
 *
 * Plain near return.  Same deferral scheme as the 0xc2 Iw variant, just
 * without the immediate stack adjustment.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8602
8603
/**
 * @opcode 0xc4
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode.
       (Note: the old comment said LDS here -- 0xc4 is LES; LDS is 0xc5.) */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if (IEM_IS_64BIT_CODE(pVCpu))
            {
#if 1
                /* Branch-free variant: shift VEX.W and the inverted R/X/B bits
                   straight into their IEM_OP_PRF_* positions. */
                AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
                AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
#else
                if (bVex2 & 0x80 /* VEX.W */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
                if (~bRm & 0x20 /* VEX.~B */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
                if (~bRm & 0x40 /* VEX.~X */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
                if (~bRm & 0x80 /* VEX.~R */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
#endif
            }
            /* Decode the remaining VEX payload fields (inverted where noted). */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low 5 bits of byte 1 select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
8689
8690
/**
 * @opcode 0xc5
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form.
       (Note: the old comment said LES here -- 0xc5 is LDS; LES is 0xc4.) */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
            pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
            /* VEX2 packs ~R, ~vvvv, L and pp into the single ModR/M byte. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            /* VEX2 always implies the 0x0f opcode map. */
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
8736
8737
/**
 * @opcode 0xc6
 *
 * Group 11: only /0 ('mov Eb,Ib') is defined; any other reg field raises
 * \#UD.  Stores an immediate byte to a register or memory destination.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* Effective address first; the Ib immediate follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8771
8772
8773/**
8774 * @opcode 0xc7
8775 */
8776FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8777{
8778 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8779 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
8780 IEMOP_RAISE_INVALID_OPCODE_RET();
8781 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8782
8783 if (IEM_IS_MODRM_REG_MODE(bRm))
8784 {
8785 /* register access */
8786 switch (pVCpu->iem.s.enmEffOpSize)
8787 {
8788 case IEMMODE_16BIT:
8789 IEM_MC_BEGIN(0, 0, 0, 0);
8790 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8792 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8793 IEM_MC_ADVANCE_RIP_AND_FINISH();
8794 IEM_MC_END();
8795 break;
8796
8797 case IEMMODE_32BIT:
8798 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8799 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8801 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8802 IEM_MC_ADVANCE_RIP_AND_FINISH();
8803 IEM_MC_END();
8804 break;
8805
8806 case IEMMODE_64BIT:
8807 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8808 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8810 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8811 IEM_MC_ADVANCE_RIP_AND_FINISH();
8812 IEM_MC_END();
8813 break;
8814
8815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8816 }
8817 }
8818 else
8819 {
8820 /* memory access. */
8821 switch (pVCpu->iem.s.enmEffOpSize)
8822 {
8823 case IEMMODE_16BIT:
8824 IEM_MC_BEGIN(0, 1, 0, 0);
8825 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8827 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8828 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8829 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8830 IEM_MC_ADVANCE_RIP_AND_FINISH();
8831 IEM_MC_END();
8832 break;
8833
8834 case IEMMODE_32BIT:
8835 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8836 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8837 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8838 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8839 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8840 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8841 IEM_MC_ADVANCE_RIP_AND_FINISH();
8842 IEM_MC_END();
8843 break;
8844
8845 case IEMMODE_64BIT:
8846 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8847 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8848 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8849 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8851 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8852 IEM_MC_ADVANCE_RIP_AND_FINISH();
8853 IEM_MC_END();
8854 break;
8855
8856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8857 }
8858 }
8859}
8860
8861
8862
8863
8864/**
8865 * @opcode 0xc8
8866 */
8867FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8868{
8869 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8870 IEMOP_HLP_MIN_186();
8871 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8872 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8873 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8875 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
8876 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8877 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8878 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8879}
8880
8881
8882/**
8883 * @opcode 0xc9
8884 */
8885FNIEMOP_DEF(iemOp_leave)
8886{
8887 IEMOP_MNEMONIC(leave, "leave");
8888 IEMOP_HLP_MIN_186();
8889 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8891 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
8892 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8893 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8894 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8895}
8896
8897
8898/**
8899 * @opcode 0xca
8900 */
8901FNIEMOP_DEF(iemOp_retf_Iw)
8902{
8903 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8904 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8906 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8907 | IEM_CIMPL_F_MODE,
8908 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8909 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8910 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8911 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8912 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8913 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8914 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8915 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8916 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8917 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8918 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8919 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8920 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
8921 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
8922 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
8923 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
8924 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
8925 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8926}
8927
8928
8929/**
8930 * @opcode 0xcb
8931 */
8932FNIEMOP_DEF(iemOp_retf)
8933{
8934 IEMOP_MNEMONIC(retf, "retf");
8935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8936 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8937 | IEM_CIMPL_F_MODE,
8938 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8939 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8940 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8941 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8942 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8943 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8944 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8945 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8946 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8947 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8948 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8949 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8950 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
8951 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
8952 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
8953 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
8954 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
8955 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8956}
8957
8958
8959/**
8960 * @opcode 0xcc
8961 */
8962FNIEMOP_DEF(iemOp_int3)
8963{
8964 IEMOP_MNEMONIC(int3, "int3");
8965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8966 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8967 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
8968 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8969}
8970
8971
8972/**
8973 * @opcode 0xcd
8974 */
8975FNIEMOP_DEF(iemOp_int_Ib)
8976{
8977 IEMOP_MNEMONIC(int_Ib, "int Ib");
8978 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8980 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8981 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
8982 iemCImpl_int, u8Int, IEMINT_INTN);
8983 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8984}
8985
8986
8987/**
8988 * @opcode 0xce
8989 */
8990FNIEMOP_DEF(iemOp_into)
8991{
8992 IEMOP_MNEMONIC(into, "into");
8993 IEMOP_HLP_NO_64BIT();
8994 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8995 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8996 UINT64_MAX,
8997 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8998 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8999}
9000
9001
9002/**
9003 * @opcode 0xcf
9004 */
9005FNIEMOP_DEF(iemOp_iret)
9006{
9007 IEMOP_MNEMONIC(iret, "iret");
9008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9009 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9010 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
9011 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9012 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9013 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9014 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9015 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9016 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9017 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9018 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9019 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9020 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9021 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9022 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9023 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9024 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9025 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9026 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9027 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9028 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
9029 /* Segment registers are sanitized when returning to an outer ring, or fully
9030 reloaded when returning to v86 mode. Thus the large flush list above. */
9031}
9032
9033
9034/**
9035 * @opcode 0xd0
9036 */
9037FNIEMOP_DEF(iemOp_Grp2_Eb_1)
9038{
9039 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9040
9041 /* Need to use a body macro here since the EFLAGS behaviour differs between
9042 the shifts, rotates and rotate w/ carry. Sigh. */
9043#define GRP2_BODY_Eb_1(a_pImplExpr) \
9044 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9045 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9046 { \
9047 /* register */ \
9048 IEM_MC_BEGIN(3, 0, 0, 0); \
9049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9050 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9051 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9052 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9053 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9054 IEM_MC_REF_EFLAGS(pEFlags); \
9055 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9056 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9057 IEM_MC_END(); \
9058 } \
9059 else \
9060 { \
9061 /* memory */ \
9062 IEM_MC_BEGIN(3, 3, 0, 0); \
9063 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9064 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9065 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9066 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9067 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9068 \
9069 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9070 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9071 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9072 IEM_MC_FETCH_EFLAGS(EFlags); \
9073 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9074 \
9075 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9076 IEM_MC_COMMIT_EFLAGS(EFlags); \
9077 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9078 IEM_MC_END(); \
9079 } (void)0
9080
9081 switch (IEM_GET_MODRM_REG_8(bRm))
9082 {
9083 /**
9084 * @opdone
9085 * @opmaps grp2_d0
9086 * @opcode /0
9087 * @opflclass rotate_1
9088 */
9089 case 0:
9090 {
9091 IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
9092 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9093 break;
9094 }
9095 /**
9096 * @opdone
9097 * @opmaps grp2_d0
9098 * @opcode /1
9099 * @opflclass rotate_1
9100 */
9101 case 1:
9102 {
9103 IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
9104 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9105 break;
9106 }
9107 /**
9108 * @opdone
9109 * @opmaps grp2_d0
9110 * @opcode /2
9111 * @opflclass rotate_carry_1
9112 */
9113 case 2:
9114 {
9115 IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9116 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9117 break;
9118 }
9119 /**
9120 * @opdone
9121 * @opmaps grp2_d0
9122 * @opcode /3
9123 * @opflclass rotate_carry_1
9124 */
9125 case 3:
9126 {
9127 IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9128 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9129 break;
9130 }
9131 /**
9132 * @opdone
9133 * @opmaps grp2_d0
9134 * @opcode /4
9135 * @opflclass shift_1
9136 */
9137 case 4:
9138 {
9139 IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9140 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9141 break;
9142 }
9143 /**
9144 * @opdone
9145 * @opmaps grp2_d0
9146 * @opcode /5
9147 * @opflclass shift_1
9148 */
9149 case 5:
9150 {
9151 IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9152 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9153 break;
9154 }
9155 /**
9156 * @opdone
9157 * @opmaps grp2_d0
9158 * @opcode /7
9159 * @opflclass shift_1
9160 */
9161 case 7:
9162 {
9163 IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
9164 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9165 break;
9166 }
9167 /** @opdone */
9168 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9169 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9170 }
9171#undef GRP2_BODY_Eb_1
9172}
9173
9174
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh. */
/* GRP2_BODY_Ev_1 expands to the full register/memory body for a group 2
   "Ev,1" encoding (0xd1): the shift count is the constant 1, the destination
   is a 16/32/64-bit register or memory operand selected by bRm (in scope at
   the expansion site), and a_pImplExpr picks the per-size assembly worker
   table (rol/ror/rcl/rcr/shl/shr/sar). The 32-bit register path clears the
   high half of the 64-bit GPR, matching hardware writeback rules. */
#define GRP2_BODY_Ev_1(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9295
9296/**
9297 * @opmaps grp2_d1
9298 * @opcode /0
9299 * @opflclass rotate_1
9300 */
9301FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
9302{
9303 IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
9304 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9305}
9306
9307
9308/**
9309 * @opmaps grp2_d1
9310 * @opcode /1
9311 * @opflclass rotate_1
9312 */
9313FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
9314{
9315 IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
9316 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9317}
9318
9319
9320/**
9321 * @opmaps grp2_d1
9322 * @opcode /2
9323 * @opflclass rotate_carry_1
9324 */
9325FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
9326{
9327 IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9328 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9329}
9330
9331
9332/**
9333 * @opmaps grp2_d1
9334 * @opcode /3
9335 * @opflclass rotate_carry_1
9336 */
9337FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
9338{
9339 IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9340 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9341}
9342
9343
9344/**
9345 * @opmaps grp2_d1
9346 * @opcode /4
9347 * @opflclass shift_1
9348 */
9349FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
9350{
9351 IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9352 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9353}
9354
9355
9356/**
9357 * @opmaps grp2_d1
9358 * @opcode /5
9359 * @opflclass shift_1
9360 */
9361FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
9362{
9363 IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9364 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9365}
9366
9367
9368/**
9369 * @opmaps grp2_d1
9370 * @opcode /7
9371 * @opflclass shift_1
9372 */
9373FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
9374{
9375 IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
9376 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9377}
9378
9379#undef GRP2_BODY_Ev_1
9380
9381/**
9382 * @opcode 0xd1
9383 */
9384FNIEMOP_DEF(iemOp_Grp2_Ev_1)
9385{
9386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9387 switch (IEM_GET_MODRM_REG_8(bRm))
9388 {
9389 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
9390 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
9391 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
9392 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
9393 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
9394 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
9395 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
9396 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9397 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9398 }
9399}
9400
9401
9402/**
9403 * @opcode 0xd2
9404 */
9405FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
9406{
9407 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9408
9409 /* Need to use a body macro here since the EFLAGS behaviour differs between
9410 the shifts, rotates and rotate w/ carry. Sigh. */
9411#define GRP2_BODY_Eb_CL(a_pImplExpr) \
9412 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9413 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9414 { \
9415 /* register */ \
9416 IEM_MC_BEGIN(3, 0, 0, 0); \
9417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9418 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9419 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9420 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9421 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9422 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9423 IEM_MC_REF_EFLAGS(pEFlags); \
9424 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9425 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9426 IEM_MC_END(); \
9427 } \
9428 else \
9429 { \
9430 /* memory */ \
9431 IEM_MC_BEGIN(3, 3, 0, 0); \
9432 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9433 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9434 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9436 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9437 \
9438 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9439 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9440 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9441 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9442 IEM_MC_FETCH_EFLAGS(EFlags); \
9443 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9444 \
9445 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9446 IEM_MC_COMMIT_EFLAGS(EFlags); \
9447 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9448 IEM_MC_END(); \
9449 } (void)0
9450
9451 switch (IEM_GET_MODRM_REG_8(bRm))
9452 {
9453 /**
9454 * @opdone
9455 * @opmaps grp2_d0
9456 * @opcode /0
9457 * @opflclass rotate_count
9458 */
9459 case 0:
9460 {
9461 IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9462 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9463 break;
9464 }
9465 /**
9466 * @opdone
9467 * @opmaps grp2_d0
9468 * @opcode /1
9469 * @opflclass rotate_count
9470 */
9471 case 1:
9472 {
9473 IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9474 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9475 break;
9476 }
9477 /**
9478 * @opdone
9479 * @opmaps grp2_d0
9480 * @opcode /2
9481 * @opflclass rotate_carry_count
9482 */
9483 case 2:
9484 {
9485 IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9486 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9487 break;
9488 }
9489 /**
9490 * @opdone
9491 * @opmaps grp2_d0
9492 * @opcode /3
9493 * @opflclass rotate_carry_count
9494 */
9495 case 3:
9496 {
9497 IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9498 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9499 break;
9500 }
9501 /**
9502 * @opdone
9503 * @opmaps grp2_d0
9504 * @opcode /4
9505 * @opflclass shift_count
9506 */
9507 case 4:
9508 {
9509 IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9510 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9511 break;
9512 }
9513 /**
9514 * @opdone
9515 * @opmaps grp2_d0
9516 * @opcode /5
9517 * @opflclass shift_count
9518 */
9519 case 5:
9520 {
9521 IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9522 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9523 break;
9524 }
9525 /**
9526 * @opdone
9527 * @opmaps grp2_d0
9528 * @opcode /7
9529 * @opflclass shift_count
9530 */
9531 case 7:
9532 {
9533 IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9534 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9535 break;
9536 }
9537 /** @opdone */
9538 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9539 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9540 }
9541#undef GRP2_BODY_Eb_CL
9542}
9543
9544
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh. */
/* GRP2_BODY_Ev_CL expands to the full register/memory body for a group 2
   "Ev,CL" encoding (0xd3): like GRP2_BODY_Ev_1, but the shift count is
   fetched from CL into cShiftArg instead of being the constant 1. In the
   memory paths CL is fetched after effective-address calculation but before
   mapping the destination. The 32-bit register path clears the high half of
   the 64-bit GPR, matching hardware writeback rules. */
#define GRP2_BODY_Ev_CL(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint8_t, cShiftArg, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
9671
9672
9673/**
9674 * @opmaps grp2_d0
9675 * @opcode /0
9676 * @opflclass rotate_count
9677 */
9678FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
9679{
9680 IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9681 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9682}
9683
9684
9685/**
9686 * @opmaps grp2_d0
9687 * @opcode /1
9688 * @opflclass rotate_count
9689 */
9690FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
9691{
9692 IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9693 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9694}
9695
9696
9697/**
9698 * @opmaps grp2_d0
9699 * @opcode /2
9700 * @opflclass rotate_carry_count
9701 */
9702FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
9703{
9704 IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9705 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9706}
9707
9708
9709/**
9710 * @opmaps grp2_d0
9711 * @opcode /3
9712 * @opflclass rotate_carry_count
9713 */
9714FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
9715{
9716 IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9717 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9718}
9719
9720
9721/**
9722 * @opmaps grp2_d0
9723 * @opcode /4
9724 * @opflclass shift_count
9725 */
9726FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
9727{
9728 IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9729 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9730}
9731
9732
9733/**
9734 * @opmaps grp2_d0
9735 * @opcode /5
9736 * @opflclass shift_count
9737 */
9738FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
9739{
9740 IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9741 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9742}
9743
9744
9745/**
9746 * @opmaps grp2_d0
9747 * @opcode /7
9748 * @opflclass shift_count
9749 */
9750FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
9751{
9752 IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9753 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9754}
9755
9756#undef GRP2_BODY_Ev_CL
9757
9758/**
9759 * @opcode 0xd3
9760 */
9761FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
9762{
9763 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9764 switch (IEM_GET_MODRM_REG_8(bRm))
9765 {
9766 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
9767 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
9768 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
9769 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
9770 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
9771 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
9772 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
9773 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9774 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9775 }
9776}
9777
9778
9779/**
9780 * @opcode 0xd4
9781 * @opflmodify cf,pf,af,zf,sf,of
9782 * @opflundef cf,af,of
9783 */
9784FNIEMOP_DEF(iemOp_aam_Ib)
9785{
9786/** @todo testcase: aam */
9787 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
9788 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9790 IEMOP_HLP_NO_64BIT();
9791 if (!bImm)
9792 IEMOP_RAISE_DIVIDE_ERROR_RET();
9793 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
9794}
9795
9796
9797/**
9798 * @opcode 0xd5
9799 * @opflmodify cf,pf,af,zf,sf,of
9800 * @opflundef cf,af,of
9801 */
9802FNIEMOP_DEF(iemOp_aad_Ib)
9803{
9804/** @todo testcase: aad? */
9805 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
9806 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9808 IEMOP_HLP_NO_64BIT();
9809 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
9810}
9811
9812
9813/**
9814 * @opcode 0xd6
9815 */
9816FNIEMOP_DEF(iemOp_salc)
9817{
9818 IEMOP_MNEMONIC(salc, "salc");
9819 IEMOP_HLP_NO_64BIT();
9820
9821 IEM_MC_BEGIN(0, 0, 0, 0);
9822 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9823 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9824 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
9825 } IEM_MC_ELSE() {
9826 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
9827 } IEM_MC_ENDIF();
9828 IEM_MC_ADVANCE_RIP_AND_FINISH();
9829 IEM_MC_END();
9830}
9831
9832
9833/**
9834 * @opcode 0xd7
9835 */
9836FNIEMOP_DEF(iemOp_xlat)
9837{
9838 IEMOP_MNEMONIC(xlat, "xlat");
9839 switch (pVCpu->iem.s.enmEffAddrMode)
9840 {
9841 case IEMMODE_16BIT:
9842 IEM_MC_BEGIN(2, 0, 0, 0);
9843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9844 IEM_MC_LOCAL(uint8_t, u8Tmp);
9845 IEM_MC_LOCAL(uint16_t, u16Addr);
9846 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
9847 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
9848 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
9849 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9850 IEM_MC_ADVANCE_RIP_AND_FINISH();
9851 IEM_MC_END();
9852 break;
9853
9854 case IEMMODE_32BIT:
9855 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
9856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9857 IEM_MC_LOCAL(uint8_t, u8Tmp);
9858 IEM_MC_LOCAL(uint32_t, u32Addr);
9859 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
9860 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
9861 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
9862 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9863 IEM_MC_ADVANCE_RIP_AND_FINISH();
9864 IEM_MC_END();
9865 break;
9866
9867 case IEMMODE_64BIT:
9868 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
9869 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9870 IEM_MC_LOCAL(uint8_t, u8Tmp);
9871 IEM_MC_LOCAL(uint64_t, u64Addr);
9872 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
9873 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
9874 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
9875 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9876 IEM_MC_ADVANCE_RIP_AND_FINISH();
9877 IEM_MC_END();
9878 break;
9879
9880 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9881 }
9882}
9883
9884
9885/**
9886 * Common worker for FPU instructions working on ST0 and STn, and storing the
9887 * result in ST0.
9888 *
9889 * @param bRm Mod R/M byte.
9890 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9891 */
9892FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9893{
9894 IEM_MC_BEGIN(3, 1, 0, 0);
9895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9896 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9897 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9898 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9899 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9900
9901 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9902 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9903 IEM_MC_PREPARE_FPU_USAGE();
9904 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9905 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9906 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9907 } IEM_MC_ELSE() {
9908 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9909 } IEM_MC_ENDIF();
9910 IEM_MC_ADVANCE_RIP_AND_FINISH();
9911
9912 IEM_MC_END();
9913}
9914
9915
9916/**
9917 * Common worker for FPU instructions working on ST0 and STn, and only affecting
9918 * flags.
9919 *
9920 * @param bRm Mod R/M byte.
9921 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9922 */
9923FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9924{
9925 IEM_MC_BEGIN(3, 1, 0, 0);
9926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9927 IEM_MC_LOCAL(uint16_t, u16Fsw);
9928 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9929 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9930 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9931
9932 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9933 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9934 IEM_MC_PREPARE_FPU_USAGE();
9935 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9936 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9937 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9938 } IEM_MC_ELSE() {
9939 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9940 } IEM_MC_ENDIF();
9941 IEM_MC_ADVANCE_RIP_AND_FINISH();
9942
9943 IEM_MC_END();
9944}
9945
9946
9947/**
9948 * Common worker for FPU instructions working on ST0 and STn, only affecting
9949 * flags, and popping when done.
9950 *
9951 * @param bRm Mod R/M byte.
9952 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9953 */
9954FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9955{
9956 IEM_MC_BEGIN(3, 1, 0, 0);
9957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9958 IEM_MC_LOCAL(uint16_t, u16Fsw);
9959 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9960 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9961 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9962
9963 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9964 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9965 IEM_MC_PREPARE_FPU_USAGE();
9966 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9967 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9968 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9969 } IEM_MC_ELSE() {
9970 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9971 } IEM_MC_ENDIF();
9972 IEM_MC_ADVANCE_RIP_AND_FINISH();
9973
9974 IEM_MC_END();
9975}
9976
9977
/** Opcode 0xd8 11/0.  FADD ST0,STn: ST0 = ST0 + STn. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
9984
9985
/** Opcode 0xd8 11/1.  FMUL ST0,STn: ST0 = ST0 * STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
9992
9993
/** Opcode 0xd8 11/2.  FCOM ST0,STn: compare, updating only FSW. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
10000
10001
/** Opcode 0xd8 11/3.  FCOMP ST0,STn: compare (same worker as FCOM), then pop. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
10008
10009
/** Opcode 0xd8 11/4.  FSUB ST0,STn: ST0 = ST0 - STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
10016
10017
/** Opcode 0xd8 11/5.  FSUBR ST0,STn: reversed operands, ST0 = STn - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
10024
10025
/** Opcode 0xd8 11/6.  FDIV ST0,STn: ST0 = ST0 / STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
10032
10033
/** Opcode 0xd8 11/7.  FDIVR ST0,STn: reversed operands, ST0 = STn / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10040
10041
10042/**
10043 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10044 * the result in ST0.
10045 *
10046 * @param bRm Mod R/M byte.
10047 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10048 */
10049FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10050{
10051 IEM_MC_BEGIN(3, 3, 0, 0);
10052 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10053 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10054 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10055 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10056 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10057 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10058
10059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10061
10062 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10063 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10064 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10065
10066 IEM_MC_PREPARE_FPU_USAGE();
10067 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10068 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10069 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10070 } IEM_MC_ELSE() {
10071 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10072 } IEM_MC_ENDIF();
10073 IEM_MC_ADVANCE_RIP_AND_FINISH();
10074
10075 IEM_MC_END();
10076}
10077
10078
/** Opcode 0xd8 !11/0.  FADD ST0,m32real: ST0 = ST0 + [mem32]. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
10085
10086
/** Opcode 0xd8 !11/1.  FMUL ST0,m32real: ST0 = ST0 * [mem32]. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10093
10094
/** Opcode 0xd8 !11/2.  FCOM ST0,m32real: compare against a 32-bit real in
 *  memory, updating only FSW (memory operand info is recorded for FDP). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10126
10127
/** Opcode 0xd8 !11/3.  FCOMP ST0,m32real: same as FCOM m32r but pops ST0
 *  after updating FSW (and on the underflow path). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10159
10160
/** Opcode 0xd8 !11/4.  FSUB ST0,m32real: ST0 = ST0 - [mem32]. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
10167
10168
/** Opcode 0xd8 !11/5.  FSUBR ST0,m32real: reversed, ST0 = [mem32] - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
10175
10176
/** Opcode 0xd8 !11/6.  FDIV ST0,m32real: ST0 = ST0 / [mem32]. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
10183
10184
/** Opcode 0xd8 !11/7.  FDIVR ST0,m32real: reversed, ST0 = [mem32] / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10191
10192
10193/**
10194 * @opcode 0xd8
10195 */
10196FNIEMOP_DEF(iemOp_EscF0)
10197{
10198 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10199 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10200
10201 if (IEM_IS_MODRM_REG_MODE(bRm))
10202 {
10203 switch (IEM_GET_MODRM_REG_8(bRm))
10204 {
10205 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10206 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10207 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10208 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10209 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10210 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10211 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10212 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10213 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10214 }
10215 }
10216 else
10217 {
10218 switch (IEM_GET_MODRM_REG_8(bRm))
10219 {
10220 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10221 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10222 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10223 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10224 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10225 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10226 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10227 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10228 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10229 }
10230 }
10231}
10232
10233
/** Opcode 0xd9 /0 mem32real
 *
 * FLD m32real - convert a 32-bit real from memory to 80-bit and push it.
 * The check on register 7 verifies the slot that will become the new TOP is
 * free; otherwise a stack push overflow is recorded.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* ST7 becomes the new TOP after the push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10264
10265
/** Opcode 0xd9 !11/2 mem32real
 *
 * FST m32real - store ST0 to memory as a 32-bit real.  The destination is
 * mapped write-only up front; if ST0 is empty and the invalid-operation
 * exception is masked (FCW.IM), a negative QNaN is stored instead, otherwise
 * the mapping is rolled back and only the underflow is recorded. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* Masked #IA: store the indefinite QNaN. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10303
10304
/** Opcode 0xd9 !11/3
 *
 * FSTP m32real - like FST m32real (see iemOp_fst_m32r) but pops the stack
 * after the store / underflow handling. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* Masked #IA: store the indefinite QNaN. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10342
10343
/** Opcode 0xd9 !11/4
 *
 * FLDENV m14/28byte - load the FPU environment (14 or 28 bytes depending on
 * the effective operand size); deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,      1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
                        iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10362
10363
10364/** Opcode 0xd9 !11/5 */
10365FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10366{
10367 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10368 IEM_MC_BEGIN(1, 1, 0, 0);
10369 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10370 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10371
10372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10373 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10374 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10375
10376 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10377 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10378
10379 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10380 iemCImpl_fldcw, u16Fsw);
10381 IEM_MC_END();
10382}
10383
10384
/** Opcode 0xd9 !11/6
 *
 * FNSTENV m14/m28byte - store the FPU environment without checking for
 * pending exceptions first (no-wait form); deferred to the C implementation.
 * NOTE(review): the mnemonic tag below says 'fstenv' while the function
 * implements the no-wait FNSTENV encoding - confirm the tag is intended. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,      1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10403
10404
/** Opcode 0xd9 !11/7
 *
 * FNSTCW m2byte - store the current FPU control word to memory (no-wait
 * form); simple fetch-and-store, no C implementation needed. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10421
10422
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 *
 * FNOP - FPU no-operation; still raises \#NM/\#MF as appropriate and updates
 * the FPU opcode/IP bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10438
10439
/** Opcode 0xd9 11/0 stN
 *
 * FLD STn - push a copy of STn onto the stack; an empty source records a
 * push underflow instead. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10464
10465
/** Opcode 0xd9 11/3 stN
 *
 * FXCH STn - exchange ST0 and STn, clearing C1 via the X86_FSW_C1 result
 * flag handling; the underflow case (either register empty) is handed to a
 * C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t,          uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: STn's old value (with C1 set in FSW) becomes the ST0 result,
           ST0's old value is written into STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10494
10495
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *
 * FSTP STn - copy ST0 into STn and pop.  The STn==ST0 case is special-cased
 * to a plain pop (the common 'ffreep st0' idiom). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode); /* Just pop; no copy needed. */
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10542
10543
10544/**
10545 * Common worker for FPU instructions working on ST0 and replaces it with the
10546 * result, i.e. unary operators.
10547 *
10548 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10549 */
10550FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
10551{
10552 IEM_MC_BEGIN(2, 1, 0, 0);
10553 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10554 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10555 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10556 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10557
10558 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10559 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10560 IEM_MC_PREPARE_FPU_USAGE();
10561 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10562 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
10563 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10564 } IEM_MC_ELSE() {
10565 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10566 } IEM_MC_ENDIF();
10567 IEM_MC_ADVANCE_RIP_AND_FINISH();
10568
10569 IEM_MC_END();
10570}
10571
10572
/** Opcode 0xd9 0xe0.  FCHS - flip the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
10579
10580
/** Opcode 0xd9 0xe1.  FABS - clear the sign of ST0 (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10587
10588
/** Opcode 0xd9 0xe4.
 *
 * FTST - compare ST0 with +0.0, updating only FSW; an empty ST0 records a
 * stack underflow. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10612
10613
/** Opcode 0xd9 0xe5.
 *
 * FXAM - classify ST0 into the C3/C2/C0 condition bits.  Note that unlike
 * the other ST0 workers, the register reference is taken unconditionally
 * (IEM_MC_REF_FPUREG, no empty check): FXAM must also classify an empty
 * register, so the assembly worker always runs. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10634
10635
10636/**
10637 * Common worker for FPU instructions pushing a constant onto the FPU stack.
10638 *
10639 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10640 */
10641FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
10642{
10643 IEM_MC_BEGIN(1, 1, 0, 0);
10644 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10645 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10646 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10647
10648 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10649 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10650 IEM_MC_PREPARE_FPU_USAGE();
10651 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10652 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10653 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10654 } IEM_MC_ELSE() {
10655 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
10656 } IEM_MC_ENDIF();
10657 IEM_MC_ADVANCE_RIP_AND_FINISH();
10658
10659 IEM_MC_END();
10660}
10661
10662
/*
 * Constant loads (0xd9 0xe8..0xee) - all share iemOpHlpFpuPushConstant and
 * differ only in the assembly helper producing the constant.
 */

/** Opcode 0xd9 0xe8. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10715
10716
/** Opcode 0xd9 0xf0.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    /* Unary ST(0) operation; defers to the common ST(0) worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10730
10731
10732/**
10733 * Common worker for FPU instructions working on STn and ST0, storing the result
10734 * in STn, and popping the stack unless IE, DE or ZE was raised.
10735 *
10736 * @param bRm Mod R/M byte.
10737 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10738 */
10739FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10740{
10741 IEM_MC_BEGIN(3, 1, 0, 0);
10742 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10743 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10744 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10745 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10746 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10747
10748 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10749 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10750
10751 IEM_MC_PREPARE_FPU_USAGE();
10752 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10753 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10754 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10755 } IEM_MC_ELSE() {
10756 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10757 } IEM_MC_ENDIF();
10758 IEM_MC_ADVANCE_RIP_AND_FINISH();
10759
10760 IEM_MC_END();
10761}
10762
10763
/** Opcode 0xd9 0xf1. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    /* Result goes to ST(1), then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10770
10771
10772/**
10773 * Common worker for FPU instructions working on ST0 and having two outputs, one
10774 * replacing ST0 and one pushed onto the stack.
10775 *
10776 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10777 */
10778FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
10779{
10780 IEM_MC_BEGIN(2, 1, 0, 0);
10781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10782 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
10783 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
10784 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10785
10786 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10787 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10788 IEM_MC_PREPARE_FPU_USAGE();
10789 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10790 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
10791 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
10792 } IEM_MC_ELSE() {
10793 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
10794 } IEM_MC_ENDIF();
10795 IEM_MC_ADVANCE_RIP_AND_FINISH();
10796
10797 IEM_MC_END();
10798}
10799
10800
/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    /* Two-output op: replaces ST(0) and pushes a second result. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    /* Result goes to ST(1), then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    /* Two-output op: replaces ST(0) and pushes a second result. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    /* Result replaces ST(0); no pop (iemOpHlpFpu_st0_stN defined earlier). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10831
10832
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Rotates the FPU register stack by decrementing the TOP field only; no
       register content or tag changes. */
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode); /* clears C0/C2/C3 */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10853
10854
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Mirror image of FDECSTP: increments the TOP field only. */
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode); /* clears C0/C2/C3 */

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10875
10876
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    /* Result replaces ST(0); no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    /* Result goes to ST(1), then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    /* Two-output op: replaces ST(0) and pushes a second result. */
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    /* Result replaces ST(0); no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10939
10940
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form 0xd9 encodings 0xe0..0xff, indexed by
 * (bRm - 0xe0).  The iemOp_Invalid entries cover the undefined encodings. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
10977
10978
10979/**
10980 * @opcode 0xd9
10981 */
10982FNIEMOP_DEF(iemOp_EscF1)
10983{
10984 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10985 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
10986
10987 if (IEM_IS_MODRM_REG_MODE(bRm))
10988 {
10989 switch (IEM_GET_MODRM_REG_8(bRm))
10990 {
10991 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
10992 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
10993 case 2:
10994 if (bRm == 0xd0)
10995 return FNIEMOP_CALL(iemOp_fnop);
10996 IEMOP_RAISE_INVALID_OPCODE_RET();
10997 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
10998 case 4:
10999 case 5:
11000 case 6:
11001 case 7:
11002 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
11003 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
11004 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11005 }
11006 }
11007 else
11008 {
11009 switch (IEM_GET_MODRM_REG_8(bRm))
11010 {
11011 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
11012 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
11013 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
11014 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
11015 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
11016 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
11017 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
11018 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
11019 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11020 }
11021 }
11022}
11023
11024
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    /* Conditional move to ST(0) from ST(N) when CF is set (below). */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; only ST(N) is referenced. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11049
11050
/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    /* Conditional move to ST(0) from ST(N) when ZF is set (equal). */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11075
11076
/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    /* Conditional move to ST(0) from ST(N) when CF or ZF is set (below-or-equal). */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11101
11102
/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    /* Conditional move to ST(0) from ST(N) when PF is set (unordered). */
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11127
11128
11129/**
11130 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11131 * flags, and popping twice when done.
11132 *
11133 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11134 */
11135FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11136{
11137 IEM_MC_BEGIN(3, 1, 0, 0);
11138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11139 IEM_MC_LOCAL(uint16_t, u16Fsw);
11140 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11141 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11142 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11143
11144 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11145 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11146
11147 IEM_MC_PREPARE_FPU_USAGE();
11148 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11149 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11150 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11151 } IEM_MC_ELSE() {
11152 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11153 } IEM_MC_ENDIF();
11154 IEM_MC_ADVANCE_RIP_AND_FINISH();
11155
11156 IEM_MC_END();
11157}
11158
11159
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    /* Unordered compare ST(0) with ST(1), then pop twice. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11166
11167
11168/**
11169 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11170 * the result in ST0.
11171 *
11172 * @param bRm Mod R/M byte.
11173 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11174 */
11175FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11176{
11177 IEM_MC_BEGIN(3, 3, 0, 0);
11178 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11179 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11180 IEM_MC_LOCAL(int32_t, i32Val2);
11181 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11182 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11183 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11184
11185 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11187
11188 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11189 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11190 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11191
11192 IEM_MC_PREPARE_FPU_USAGE();
11193 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11194 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11195 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11196 } IEM_MC_ELSE() {
11197 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11198 } IEM_MC_ENDIF();
11199 IEM_MC_ADVANCE_RIP_AND_FINISH();
11200
11201 IEM_MC_END();
11202}
11203
11204
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    /* ST(0) += m32i, via the common ST0/m32i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    /* ST(0) *= m32i, via the common ST0/m32i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11219
11220
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
    /* Compares ST(0) with an m32i; only the FSW is updated, no pop. */

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11252
11253
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
    /* Same as FICOM (shares iemAImpl_ficom_r80_by_i32) but pops ST(0) via
       the _THEN_POP FSW/underflow macro variants. */

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11285
11286
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    /* ST(0) -= m32i, via the common ST0/m32i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    /* Reverse subtract: ST(0) = m32i - ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    /* ST(0) /= m32i, via the common ST0/m32i worker. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    /* Reverse divide: ST(0) = m32i / ST(0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11317
11318
11319/**
11320 * @opcode 0xda
11321 */
11322FNIEMOP_DEF(iemOp_EscF2)
11323{
11324 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11325 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
11326 if (IEM_IS_MODRM_REG_MODE(bRm))
11327 {
11328 switch (IEM_GET_MODRM_REG_8(bRm))
11329 {
11330 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
11331 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
11332 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
11333 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
11334 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11335 case 5:
11336 if (bRm == 0xe9)
11337 return FNIEMOP_CALL(iemOp_fucompp);
11338 IEMOP_RAISE_INVALID_OPCODE_RET();
11339 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11340 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11341 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11342 }
11343 }
11344 else
11345 {
11346 switch (IEM_GET_MODRM_REG_8(bRm))
11347 {
11348 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
11349 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
11350 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
11351 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
11352 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
11353 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
11354 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
11355 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
11356 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11357 }
11358 }
11359}
11360
11361
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");
    /* Loads an m32i, converts it to R80 and pushes it onto the FPU stack. */

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing requires ST(7) (the slot below the top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11392
11393
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    /* Stores ST(0) to an m32i with truncation (chop) and pops the stack. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination first so memory faults happen before FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked, store the integer-indefinite value;
           otherwise roll the mapping back and raise underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11431
11432
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    /* Stores ST(0) to an m32i using the current rounding mode; no pop. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination first so memory faults happen before FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked, store the integer-indefinite value;
           otherwise roll the mapping back and raise underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11470
11471
11472/** Opcode 0xdb !11/3. */
11473FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
11474{
11475 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
11476 IEM_MC_BEGIN(3, 2, 0, 0);
11477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11478 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11479
11480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11481 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11482 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11483 IEM_MC_PREPARE_FPU_USAGE();
11484
11485 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
11486 IEM_MC_ARG(int32_t *, pi32Dst, 1);
11487 IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11488
11489 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
11490 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
11491 IEM_MC_LOCAL(uint16_t, u16Fsw);
11492 IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
11493 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
11494 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
11495 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11496 } IEM_MC_ELSE() {
11497 IEM_MC_IF_FCW_IM() {
11498 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
11499 IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
11500 } IEM_MC_ELSE() {
11501 IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
11502 } IEM_MC_ENDIF();
11503 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
11504 } IEM_MC_ENDIF();
11505 IEM_MC_ADVANCE_RIP_AND_FINISH();
11506
11507 IEM_MC_END();
11508}
11509
11510
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");
    /* Loads an 80-bit real from memory and pushes it onto the FPU stack. */

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing requires ST(7) (the slot below the top) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11541
11542
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    /* Stores ST(0) to an 80-bit real in memory and pops the stack. */
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination first so memory faults happen before FPU work. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked, store negative QNaN (real indefinite);
           otherwise roll the mapping back and raise underflow. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11580
11581
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i) - copies ST(i) to ST(0) when CF is clear; both
 * registers must be non-empty or a stack underflow is signalled. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11606
11607
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i) - copies ST(i) to ST(0) when ZF is clear; both
 * registers must be non-empty or a stack underflow is signalled. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11632
11633
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i) - copies ST(i) to ST(0) when both CF and ZF are
 * clear; both registers must be non-empty or a stack underflow is signalled. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11658
11659
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i) - copies ST(i) to ST(0) when PF is clear (not
 * unordered); both registers must be non-empty or underflow is signalled. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the move condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11684
11685
/** Opcode 0xdb 0xe0.
 * FNENI - enable interrupts; only meaningful on the 8087, emulated as a
 * no-op here (still raises \#NM if the FPU is unavailable). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11696
11697
/** Opcode 0xdb 0xe1.
 * FNDISI - disable interrupts; only meaningful on the 8087, emulated as a
 * no-op here (still raises \#NM if the FPU is unavailable). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11708
11709
/** Opcode 0xdb 0xe2.
 * FNCLEX - clears the FPU exception bits in FSW without checking for
 * pending exceptions first (the no-wait form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11722
11723
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitializes the FPU; deferred to the C implementation with
 * fCheckXcpts=false, i.e. the no-wait variant. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                                iemCImpl_finit, false /*fCheckXcpts*/);
}
11732
11733
/** Opcode 0xdb 0xe4.
 * FNSETPM - set protected mode on the FPU; 80287 only, emulated as a
 * no-op here (still raises \#NM if the FPU is unavailable). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11744
11745
/** Opcode 0xdb 0xe5.
 * FRSTPM - reset protected mode, back to real mode; 80287XL only.
 * Emulated as \#UD since that is what newer CPUs do (the no-op variant is
 * kept under '#if 0' for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11761
11762
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i) - unordered compare setting EFLAGS; deferred to the
 * shared C implementation with fUCmp=true and no pop.  The third argument
 * packs the fPop flag together with the current FPU opcode word. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11771
11772
11773/** Opcode 0xdb 11/6. */
11774FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11775{
11776 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11777 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11778 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11779 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11780}
11781
11782
11783/**
11784 * @opcode 0xdb
11785 */
11786FNIEMOP_DEF(iemOp_EscF3)
11787{
11788 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11789 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
11790 if (IEM_IS_MODRM_REG_MODE(bRm))
11791 {
11792 switch (IEM_GET_MODRM_REG_8(bRm))
11793 {
11794 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
11795 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
11796 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
11797 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
11798 case 4:
11799 switch (bRm)
11800 {
11801 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
11802 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
11803 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
11804 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
11805 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
11806 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
11807 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
11808 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
11809 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11810 }
11811 break;
11812 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
11813 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
11814 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11815 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11816 }
11817 }
11818 else
11819 {
11820 switch (IEM_GET_MODRM_REG_8(bRm))
11821 {
11822 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
11823 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
11824 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
11825 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
11826 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11827 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
11828 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11829 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
11830 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11831 }
11832 }
11833}
11834
11835
11836/**
11837 * Common worker for FPU instructions working on STn and ST0, and storing the
11838 * result in STn unless IE, DE or ZE was raised.
11839 *
11840 * @param bRm Mod R/M byte.
11841 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11842 */
11843FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11844{
11845 IEM_MC_BEGIN(3, 1, 0, 0);
11846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11847 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11848 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11849 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11850 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11851
11852 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11853 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11854
11855 IEM_MC_PREPARE_FPU_USAGE();
11856 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11857 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11858 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11859 } IEM_MC_ELSE() {
11860 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11861 } IEM_MC_ENDIF();
11862 IEM_MC_ADVANCE_RIP_AND_FINISH();
11863
11864 IEM_MC_END();
11865}
11866
11867
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - adds ST(0) to ST(i), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11874
11875
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - multiplies ST(i) by ST(0), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11882
11883
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reverse subtract, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11890
11891
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - subtracts ST(0) from ST(i), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11898
11899
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reverse divide, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11906
11907
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - divides ST(i) by ST(0), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11914
11915
11916/**
11917 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
11918 * memory operand, and storing the result in ST0.
11919 *
11920 * @param bRm Mod R/M byte.
11921 * @param pfnImpl Pointer to the instruction implementation (assembly).
11922 */
11923FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
11924{
11925 IEM_MC_BEGIN(3, 3, 0, 0);
11926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11927 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11928 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
11929 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11930 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
11931 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
11932
11933 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11935 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11936 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11937
11938 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11939 IEM_MC_PREPARE_FPU_USAGE();
11940 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
11941 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
11942 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11943 } IEM_MC_ELSE() {
11944 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11945 } IEM_MC_ENDIF();
11946 IEM_MC_ADVANCE_RIP_AND_FINISH();
11947
11948 IEM_MC_END();
11949}
11950
11951
/** Opcode 0xdc !11/0.
 * FADD m64real - adds a 64-bit float in memory to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11958
11959
/** Opcode 0xdc !11/1.
 * FMUL m64real - multiplies ST(0) by a 64-bit float in memory. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11966
11967
/** Opcode 0xdc !11/2.
 * FCOM m64real - compares ST(0) with a 64-bit float in memory, updating
 * the FSW condition codes; no register is modified and nothing is popped. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11999
12000
/** Opcode 0xdc !11/3.
 * FCOMP m64real - same comparison as FCOM m64real, but pops the register
 * stack afterwards (note the _THEN_POP FSW update variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val2,   r64Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12032
12033
/** Opcode 0xdc !11/4.
 * FSUB m64real - subtracts a 64-bit float in memory from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
12040
12041
/** Opcode 0xdc !11/5.
 * FSUBR m64real - reverse subtract: memory operand minus ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
12048
12049
/** Opcode 0xdc !11/6.
 * FDIV m64real - divides ST(0) by a 64-bit float in memory. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
12056
12057
/** Opcode 0xdc !11/7.
 * FDIVR m64real - reverse divide: memory operand divided by ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12064
12065
12066/**
12067 * @opcode 0xdc
12068 */
12069FNIEMOP_DEF(iemOp_EscF4)
12070{
12071 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12072 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
12073 if (IEM_IS_MODRM_REG_MODE(bRm))
12074 {
12075 switch (IEM_GET_MODRM_REG_8(bRm))
12076 {
12077 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
12078 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
12079 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
12080 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
12081 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
12082 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
12083 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
12084 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
12085 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12086 }
12087 }
12088 else
12089 {
12090 switch (IEM_GET_MODRM_REG_8(bRm))
12091 {
12092 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
12093 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
12094 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
12095 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
12096 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
12097 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
12098 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
12099 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
12100 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12101 }
12102 }
12103}
12104
12105
/** Opcode 0xdd !11/0.
 * FLD m64real - converts a 64-bit float from memory to 80-bit and pushes it
 * onto the FPU register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U,            r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U,  pr64Val,    r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push targets ST(-1), i.e. logical register 7 relative to the top. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12136
12137
/** Opcode 0xdd !11/1.
 * FISTTP m64int - stores ST(0) to a 64-bit integer in memory using
 * truncation, then pops the register stack.
 * (Header previously said !11/0; EscF5 dispatches this at /1.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults are raised
       before any FPU state is modified. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(int64_t *,               pi64Dst,        1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,      2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked, store the integer indefinite value;
           otherwise roll the mapping back untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12175
12176
/** Opcode 0xdd !11/2.
 * FST m64real - stores ST(0) to a 64-bit float in memory; no pop.
 * (Header previously said !11/0; EscF5 dispatches this at /2.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults are raised
       before any FPU state is modified. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,        1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,      2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked, store the negative QNaN indefinite;
           otherwise roll the mapping back untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12214
12215
12216
12217
/** Opcode 0xdd !11/3.
 * FSTP m64real - stores ST(0) to a 64-bit float in memory and pops the
 * register stack.  (Header previously said !11/0; EscF5 dispatches at /3.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination write-only up front so memory faults are raised
       before any FPU state is modified. */
    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U,             pr64Dst,        1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,      2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t,          u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST(0) empty: with IM masked, store the negative QNaN indefinite;
           otherwise roll the mapping back untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12255
12256
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restores the full FPU state from memory; deferred to
 * the C implementation.  (Header previously said !11/0; EscF5 dispatches
 * this at /4.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffSrc,                                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,              0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,                   1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12275
12276
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - saves the full FPU state to memory (no-wait form);
 * deferred to the C implementation.  (Header previously said !11/0; EscF5
 * dispatches this at /6.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR,                 GCPtrEffDst,                                                2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE,           enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,              0);
    IEM_MC_ARG_CONST(uint8_t,           iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,                   1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12295
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - stores the FPU status word to a 16-bit memory location
 * (no-wait form).  (Header previously said !11/0; EscF5 dispatches at /7.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12319
12320
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - tags the specified register as empty without changing its
 * contents or the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12340
12341
/** Opcode 0xdd 11/2.
 * FST ST(i) - copies ST(0) to ST(i).  (Header previously said 11/1; EscF5
 * dispatches this at /2.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        /* Wrap the source value in a result with a zero FSW delta. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12364
12365
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i) - unordered compare updating FSW condition codes.
 * (Header previously said 11/3; EscF5 dispatches this at /4.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
12372
12373
/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i) - unordered compare, then pop the register stack.
 * (Header previously said 11/4; EscF5 dispatches this at /5.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12380
12381
12382/**
12383 * @opcode 0xdd
12384 */
12385FNIEMOP_DEF(iemOp_EscF5)
12386{
12387 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12388 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
12389 if (IEM_IS_MODRM_REG_MODE(bRm))
12390 {
12391 switch (IEM_GET_MODRM_REG_8(bRm))
12392 {
12393 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
12394 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
12395 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
12396 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
12397 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
12398 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
12399 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
12400 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
12401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12402 }
12403 }
12404 else
12405 {
12406 switch (IEM_GET_MODRM_REG_8(bRm))
12407 {
12408 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
12409 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
12410 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
12411 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
12412 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
12413 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
12414 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
12415 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
12416 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12417 }
12418 }
12419}
12420
12421
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - adds ST(0) to ST(i), stores in ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
12428
12429
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiplies ST(i) by ST(0), stores in ST(i), then pops.
 * (Header previously said 11/0 - copy/paste from faddp.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12436
12437
/** Opcode 0xde 0xd9.
 * FCOMPP - compares ST(0) with ST(1), then pops both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
12444
12445
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract, stores in ST(i), then pops. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
12452
12453
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): subtract into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
12460
12461
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): reversed divide into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
12468
12469
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): divide into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12476
12477
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Decodes the effective address, fetches the 16-bit integer operand, and if
 * ST(0) is not empty calls the assembly worker; otherwise records a stack
 * underflow. The IEM_MC_* statement order is replayed by the recompiler and
 * must not be rearranged.
 *
 * @param   bRm         Mod R/M byte.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    /* Address decoding must complete before the done-decoding marker. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): signal underflow instead of calling the worker. */
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12513
12514
/** Opcode 0xde !11/0.
 * FIADD m16int: ST(0) += (int16 from memory). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
12521
12522
/** Opcode 0xde !11/1.
 * FIMUL m16int: ST(0) *= (int16 from memory). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12529
12530
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST(0) with a 16-bit integer from memory; only FSW is
 * updated, no result is stored and nothing is popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Compare only affects FSW; record it together with the memory operand info. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12562
12563
/** Opcode 0xde !11/3.
 * FICOMP m16int: like FICOM m16int but pops ST(0) afterwards (note the
 * _THEN_POP variants of the FSW/underflow updates). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12595
12596
/** Opcode 0xde !11/4.
 * FISUB m16int: ST(0) -= (int16 from memory). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
12603
12604
/** Opcode 0xde !11/5.
 * FISUBR m16int: ST(0) = (int16 from memory) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12611
12612
/** Opcode 0xde !11/6.
 * FIDIV m16int: ST(0) /= (int16 from memory). */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12619
12620
/** Opcode 0xde !11/7.
 * FIDIVR m16int: ST(0) = (int16 from memory) / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12627
12628
/**
 * @opcode 0xde
 *
 * Escape opcode 0xde dispatcher: register-form (mod=11) instructions operate
 * on ST(i) and pop, memory-form instructions take a 16-bit integer operand.
 * The low 11 bits of the FPU opcode (FOP) are recorded for FSTENV & friends.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP = low 3 bits of the escape byte + the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)   /* Only DE D9 is valid: FCOMPP. */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12669
12670
/** Opcode 0xdf 11/0.
 * Undocument instruction, assumed to work like ffree + fincstp: marks ST(i)
 * empty, then increments the FPU stack top pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));   /* ffree ST(i) */
    IEM_MC_FPU_STACK_INC_TOP();                       /* fincstp (the "pop" part) */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12690
12691
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX without checking for pending
 * FPU exceptions (hence the "no-wait" form and read-only state actualization). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12706
12707
12708/** Opcode 0xdf 11/5. */
12709FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12710{
12711 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12712 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12713 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12714 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12715}
12716
12717
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare of ST(0) with ST(i) setting EFLAGS
 * (fUCmp=false => \#IA on any NaN operand), then pop ST(0) (bit 31 of the
 * combined fPop/FOP parameter). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12726
12727
/** Opcode 0xdf !11/0.
 * FILD m16int: convert a 16-bit integer from memory to 80-bit real and push
 * it onto the FPU stack. Pushing requires ST(7) (the reg below TOP) to be
 * empty, otherwise a stack overflow is recorded. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12758
12759
/** Opcode 0xdf !11/1.
 * FISTTP m16int (SSE3): store ST(0) to memory as int16 using truncation
 * (chop) rounding, then pop. The destination is mapped write-only up front;
 * on stack underflow with FCW.IM set, the integer-indefinite value is stored
 * instead, otherwise the mapping is rolled back. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced (masked exceptions). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            /* Invalid-op masked: store the integer indefinite value. */
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12797
12798
/** Opcode 0xdf !11/2.
 * FIST m16int: store ST(0) to memory as int16 using the current FCW rounding
 * mode; does NOT pop (note the non-_THEN_POP FSW/underflow updates, in
 * contrast to FISTP below). */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12836
12837
/** Opcode 0xdf !11/3.
 * FISTP m16int: store ST(0) to memory as int16 using the current FCW
 * rounding mode, then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12875
12876
/** Opcode 0xdf !11/4.
 * FBLD m80bcd: load an 80-bit packed BCD value from memory, convert it to
 * 80-bit real and push onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {   /* push requires a free slot below TOP */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12907
12908
/** Opcode 0xdf !11/5.
 * FILD m64int: convert a 64-bit integer from memory to 80-bit real and push
 * it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12939
12940
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd: convert ST(0) to 80-bit packed BCD, store it to memory, then
 * pop ST(0). On underflow with FCW.IM set, a BCD-indefinite value is stored. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12978
12979
/** Opcode 0xdf !11/7.
 * FISTP m64int: store ST(0) to memory as int64 using the current FCW
 * rounding mode, then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13017
13018
/**
 * @opcode 0xdf
 *
 * Escape opcode 0xdf dispatcher. Register-form /1-/3 are reserved encodings
 * handled per observed Intel behavior; memory forms are integer/BCD
 * load/store instructions.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) for FSTENV/FSAVE. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0)   /* Only DF E0 is valid: FNSTSW AX. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13059
13060
/**
 * @opcode 0xe0
 * @opfltest zf
 *
 * LOOPNE/LOOPNZ Jb: decrement xCX (width per effective address size) and
 * branch if the new xCX != 0 and ZF is clear. The "not one" check tests the
 * pre-decrement value, which is equivalent to post-decrement != 0; xCX is
 * decremented on both paths.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13115
13116
/**
 * @opcode 0xe1
 * @opfltest zf
 *
 * LOOPE/LOOPZ Jb: decrement xCX and branch if the new xCX != 0 and ZF is
 * set. Mirrors LOOPNE except for the ZF polarity.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13171
13172
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement xCX and branch while the new xCX != 0. When logging is
 * enabled, a self-referencing LOOP $-2 (a busy-wait stall idiom) is
 * short-circuited by zeroing xCX, to avoid flooding the log.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        /* The branch target is the instruction itself: terminate the stall by zeroing xCX. */
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                /* CX == 1: decrementing yields zero, so store 0 and fall through. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NOT_ONE() {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13266
13267
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: branch when xCX (width per effective address size)
 * is zero; xCX itself is not modified. The condition is coded inverted:
 * non-zero falls through, zero takes the relative jump.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13315
13316
/**
 * @opcode 0xe4
 * @opfltest iopl
 *
 * IN AL,Ib: read one byte from the immediate I/O port into AL. Deferred to
 * the C implementation (I/O + possible VM-exit); AL is the only guest
 * register modified, as declared by the GprFirst+xAX mask.
 */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13329
13330
/**
 * @opcode 0xe5
 * @opfltest iopl
 *
 * IN eAX,Ib: read 2 or 4 bytes (per effective operand size) from the
 * immediate I/O port into AX/EAX.
 */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13344
13345
/**
 * @opcode 0xe6
 * @opfltest iopl
 *
 * OUT Ib,AL: write AL to the immediate I/O port. No guest registers are
 * modified (zero register mask).
 */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
13358
13359
13360/**
13361 * @opcode 0xe7
13362 * @opfltest iopl
13363 */
13364FNIEMOP_DEF(iemOp_out_Ib_eAX)
13365{
13366 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
13367 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13369 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13370 iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13371 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13372}
13373
13374
13375/**
13376 * @opcode 0xe8
13377 */
13378FNIEMOP_DEF(iemOp_call_Jv)
13379{
13380 IEMOP_MNEMONIC(call_Jv, "call Jv");
13381 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13382 switch (pVCpu->iem.s.enmEffOpSize)
13383 {
13384 case IEMMODE_16BIT:
13385 {
13386 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
13387 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13388 iemCImpl_call_rel_16, (int16_t)u16Imm);
13389 }
13390
13391 case IEMMODE_32BIT:
13392 {
13393 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
13394 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13395 iemCImpl_call_rel_32, (int32_t)u32Imm);
13396 }
13397
13398 case IEMMODE_64BIT:
13399 {
13400 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
13401 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
13402 iemCImpl_call_rel_64, u64Imm);
13403 }
13404
13405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13406 }
13407}
13408
13409
13410/**
13411 * @opcode 0xe9
13412 */
13413FNIEMOP_DEF(iemOp_jmp_Jv)
13414{
13415 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
13416 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13417 switch (pVCpu->iem.s.enmEffOpSize)
13418 {
13419 case IEMMODE_16BIT:
13420 IEM_MC_BEGIN(0, 0, 0, 0);
13421 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
13422 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13423 IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
13424 IEM_MC_END();
13425 break;
13426
13427 case IEMMODE_64BIT:
13428 case IEMMODE_32BIT:
13429 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13430 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
13431 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13432 IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
13433 IEM_MC_END();
13434 break;
13435
13436 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13437 }
13438}
13439
13440
13441/**
13442 * @opcode 0xea
13443 */
13444FNIEMOP_DEF(iemOp_jmp_Ap)
13445{
13446 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
13447 IEMOP_HLP_NO_64BIT();
13448
13449 /* Decode the far pointer address and pass it on to the far call C implementation. */
13450 uint32_t off32Seg;
13451 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
13452 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
13453 else
13454 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
13455 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
13456 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13457 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
13458 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
13459 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
13460 /** @todo make task-switches, ring-switches, ++ return non-zero status */
13461}
13462
13463
13464/**
13465 * @opcode 0xeb
13466 */
13467FNIEMOP_DEF(iemOp_jmp_Jb)
13468{
13469 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
13470 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13471 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
13472
13473 IEM_MC_BEGIN(0, 0, 0, 0);
13474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13475 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13476 IEM_MC_END();
13477}
13478
13479
13480/**
13481 * @opcode 0xec
13482 * @opfltest iopl
13483 */
13484FNIEMOP_DEF(iemOp_in_AL_DX)
13485{
13486 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
13487 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13488 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13489 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13490 iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
13491}
13492
13493
13494/**
13495 * @opcode 0xed
13496 * @opfltest iopl
13497 */
13498FNIEMOP_DEF(iemOp_in_eAX_DX)
13499{
13500 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
13501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13502 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
13503 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13504 iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13505 pVCpu->iem.s.enmEffAddrMode);
13506}
13507
13508
13509/**
13510 * @opcode 0xee
13511 * @opfltest iopl
13512 */
13513FNIEMOP_DEF(iemOp_out_DX_AL)
13514{
13515 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
13516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13517 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13518 iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
13519}
13520
13521
13522/**
13523 * @opcode 0xef
13524 * @opfltest iopl
13525 */
13526FNIEMOP_DEF(iemOp_out_DX_eAX)
13527{
13528 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
13529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13530 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13531 iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13532 pVCpu->iem.s.enmEffAddrMode);
13533}
13534
13535
13536/**
13537 * @opcode 0xf0
13538 */
13539FNIEMOP_DEF(iemOp_lock)
13540{
13541 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
13542 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
13543
13544 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13545 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13546}
13547
13548
13549/**
13550 * @opcode 0xf1
13551 */
13552FNIEMOP_DEF(iemOp_int1)
13553{
13554 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
13555 /** @todo Does not generate \#UD on 286, or so they say... Was allegedly a
13556 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
13557 * LOADALL memo. Needs some testing. */
13558 IEMOP_HLP_MIN_386();
13559 /** @todo testcase! */
13560 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
13561 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
13562 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
13563}
13564
13565
13566/**
13567 * @opcode 0xf2
13568 */
13569FNIEMOP_DEF(iemOp_repne)
13570{
13571 /* This overrides any previous REPE prefix. */
13572 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
13573 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
13574 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
13575
13576 /* For the 4 entry opcode tables, REPNZ overrides any previous
13577 REPZ and operand size prefixes. */
13578 pVCpu->iem.s.idxPrefix = 3;
13579
13580 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13581 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13582}
13583
13584
13585/**
13586 * @opcode 0xf3
13587 */
13588FNIEMOP_DEF(iemOp_repe)
13589{
13590 /* This overrides any previous REPNE prefix. */
13591 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
13592 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
13593 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
13594
13595 /* For the 4 entry opcode tables, REPNZ overrides any previous
13596 REPNZ and operand size prefixes. */
13597 pVCpu->iem.s.idxPrefix = 2;
13598
13599 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
13600 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
13601}
13602
13603
13604/**
13605 * @opcode 0xf4
13606 */
13607FNIEMOP_DEF(iemOp_hlt)
13608{
13609 IEMOP_MNEMONIC(hlt, "hlt");
13610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13611 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
13612}
13613
13614
13615/**
13616 * @opcode 0xf5
13617 * @opflmodify cf
13618 */
13619FNIEMOP_DEF(iemOp_cmc)
13620{
13621 IEMOP_MNEMONIC(cmc, "cmc");
13622 IEM_MC_BEGIN(0, 0, 0, 0);
13623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13624 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
13625 IEM_MC_ADVANCE_RIP_AND_FINISH();
13626 IEM_MC_END();
13627}
13628
13629
13630/**
13631 * Body for of 'inc/dec/not/neg Eb'.
13632 */
13633#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
13634 if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
13635 { \
13636 /* register access */ \
13637 IEM_MC_BEGIN(2, 0, 0, 0); \
13638 IEMOP_HLP_DONE_DECODING(); \
13639 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13640 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13641 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
13642 IEM_MC_REF_EFLAGS(pEFlags); \
13643 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13644 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13645 IEM_MC_END(); \
13646 } \
13647 else \
13648 { \
13649 /* memory access. */ \
13650 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13651 { \
13652 IEM_MC_BEGIN(2, 2, 0, 0); \
13653 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13654 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13655 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13656 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13657 \
13658 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13659 IEMOP_HLP_DONE_DECODING(); \
13660 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13661 IEM_MC_FETCH_EFLAGS(EFlags); \
13662 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
13663 \
13664 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13665 IEM_MC_COMMIT_EFLAGS(EFlags); \
13666 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13667 IEM_MC_END(); \
13668 } \
13669 else \
13670 { \
13671 IEM_MC_BEGIN(2, 2, 0, 0); \
13672 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
13673 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13675 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13676 \
13677 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
13678 IEMOP_HLP_DONE_DECODING(); \
13679 IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13680 IEM_MC_FETCH_EFLAGS(EFlags); \
13681 IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
13682 \
13683 IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
13684 IEM_MC_COMMIT_EFLAGS(EFlags); \
13685 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13686 IEM_MC_END(); \
13687 } \
13688 } \
13689 (void)0
13690
13691
13692/**
13693 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
13694 */
13695#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
13696 if (IEM_IS_MODRM_REG_MODE(bRm)) \
13697 { \
13698 /* \
13699 * Register target \
13700 */ \
13701 switch (pVCpu->iem.s.enmEffOpSize) \
13702 { \
13703 case IEMMODE_16BIT: \
13704 IEM_MC_BEGIN(2, 0, 0, 0); \
13705 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13706 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13707 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13708 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13709 IEM_MC_REF_EFLAGS(pEFlags); \
13710 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13711 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13712 IEM_MC_END(); \
13713 break; \
13714 \
13715 case IEMMODE_32BIT: \
13716 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
13717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13718 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13719 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13720 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13721 IEM_MC_REF_EFLAGS(pEFlags); \
13722 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13723 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
13724 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13725 IEM_MC_END(); \
13726 break; \
13727 \
13728 case IEMMODE_64BIT: \
13729 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
13730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
13731 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13732 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
13733 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
13734 IEM_MC_REF_EFLAGS(pEFlags); \
13735 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13736 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13737 IEM_MC_END(); \
13738 break; \
13739 \
13740 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13741 } \
13742 } \
13743 else \
13744 { \
13745 /* \
13746 * Memory target. \
13747 */ \
13748 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
13749 { \
13750 switch (pVCpu->iem.s.enmEffOpSize) \
13751 { \
13752 case IEMMODE_16BIT: \
13753 IEM_MC_BEGIN(2, 3, 0, 0); \
13754 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
13755 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13756 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13757 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13758 \
13759 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13760 IEMOP_HLP_DONE_DECODING(); \
13761 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13762 IEM_MC_FETCH_EFLAGS(EFlags); \
13763 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
13764 \
13765 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13766 IEM_MC_COMMIT_EFLAGS(EFlags); \
13767 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13768 IEM_MC_END(); \
13769 break; \
13770 \
13771 case IEMMODE_32BIT: \
13772 IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
13773 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
13774 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13775 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13776 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13777 \
13778 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13779 IEMOP_HLP_DONE_DECODING(); \
13780 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13781 IEM_MC_FETCH_EFLAGS(EFlags); \
13782 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
13783 \
13784 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13785 IEM_MC_COMMIT_EFLAGS(EFlags); \
13786 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13787 IEM_MC_END(); \
13788 break; \
13789 \
13790 case IEMMODE_64BIT: \
13791 IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
13792 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
13793 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
13794 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
13795 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
13796 \
13797 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
13798 IEMOP_HLP_DONE_DECODING(); \
13799 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
13800 IEM_MC_FETCH_EFLAGS(EFlags); \
13801 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
13802 \
13803 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
13804 IEM_MC_COMMIT_EFLAGS(EFlags); \
13805 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
13806 IEM_MC_END(); \
13807 break; \
13808 \
13809 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
13810 } \
13811 } \
13812 else \
13813 { \
13814 (void)0
13815
/**
 * Continuation of IEMOP_BODY_UNARY_Ev for the LOCKed memory target: maps the
 * operand atomically and calls the locked worker variant.  Closes the braces
 * left open by IEMOP_BODY_UNARY_Ev, so it must directly follow that macro.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *,      pu16Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *,      pu32Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *,      pu64Dst,         0); \
                    IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13881
13882
13883/**
13884 * @opmaps grp3_f6
13885 * @opcode /0
13886 * @opflclass logical
13887 * @todo also /1
13888 */
13889FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
13890{
13891 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
13892 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
13893
13894 if (IEM_IS_MODRM_REG_MODE(bRm))
13895 {
13896 /* register access */
13897 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13898 IEM_MC_BEGIN(3, 0, 0, 0);
13899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13900 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
13901 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
13902 IEM_MC_ARG(uint32_t *, pEFlags, 2);
13903 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
13904 IEM_MC_REF_EFLAGS(pEFlags);
13905 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13906 IEM_MC_ADVANCE_RIP_AND_FINISH();
13907 IEM_MC_END();
13908 }
13909 else
13910 {
13911 /* memory access. */
13912 IEM_MC_BEGIN(3, 3, 0, 0);
13913 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
13914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
13915
13916 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13917 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13918
13919 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
13920 IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
13921 IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
13922
13923 IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
13924 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
13925 IEM_MC_FETCH_EFLAGS(EFlags);
13926 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
13927
13928 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
13929 IEM_MC_COMMIT_EFLAGS(EFlags);
13930 IEM_MC_ADVANCE_RIP_AND_FINISH();
13931 IEM_MC_END();
13932 }
13933}
13934
13935
/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (mul/imul/div/idiv Eb).
   The worker operates on AX (pu16AX) and the 8-bit operand; it returns a
   non-zero status for divide errors, in which case \#DE is raised instead of
   advancing RIP. */
#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint16_t *,      pu16AX,     0); \
        IEM_MC_ARG(uint8_t,         u8Value,    1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,    2); \
        IEM_MC_LOCAL(int32_t,       rc); \
        \
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        IEM_MC_ARG(uint16_t *,      pu16AX,     0); \
        IEM_MC_ARG(uint8_t,         u8Value,    1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,    2); \
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
        IEM_MC_LOCAL(int32_t,       rc); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } (void)0
13985
13986
/* Body for opcode 0xf7 variant /4, /5, /6 and /7 (mul/imul/div/idiv Ev).
   Uses the size-appropriate worker from the supplied IEMOPMULDIVSIZES table,
   operating on the xAX:xDX register pair.  A non-zero worker status raises
   \#DE.  In the 32-bit case the high dwords of RAX/RDX are explicitly
   cleared on success. */
#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 1, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16AX,     0); \
                IEM_MC_ARG(uint16_t *,      pu16DX,     1); \
                IEM_MC_ARG(uint16_t,        u16Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32AX,     0); \
                IEM_MC_ARG(uint32_t *,      pu32DX,     1); \
                IEM_MC_ARG(uint32_t,        u32Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64AX,     0); \
                IEM_MC_ARG(uint64_t *,      pu64DX,     1); \
                IEM_MC_ARG(uint64_t,        u64Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory access. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 2, 0, 0); \
                IEM_MC_ARG(uint16_t *,      pu16AX,     0); \
                IEM_MC_ARG(uint16_t *,      pu16DX,     1); \
                IEM_MC_ARG(uint16_t,        u16Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *,      pu32AX,     0); \
                IEM_MC_ARG(uint32_t *,      pu32DX,     1); \
                IEM_MC_ARG(uint32_t,        u32Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *,      pu64AX,     0); \
                IEM_MC_ARG(uint64_t *,      pu64DX,     1); \
                IEM_MC_ARG(uint64_t,        u64Value,   2); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    3); \
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t,       rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
14154
14155
14156/**
14157 * @opmaps grp3_f6
14158 * @opcode /2
14159 * @opflclass unchanged
14160 */
14161FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
14162{
14163/** @todo does not modify EFLAGS. */
14164 IEMOP_MNEMONIC(not_Eb, "not Eb");
14165 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
14166}
14167
14168
14169/**
14170 * @opmaps grp3_f6
14171 * @opcode /3
14172 * @opflclass arithmetic
14173 */
14174FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14175{
14176 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14177 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14178}
14179
14180
14181/**
14182 * @opcode 0xf6
14183 */
14184FNIEMOP_DEF(iemOp_Grp3_Eb)
14185{
14186 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14187 switch (IEM_GET_MODRM_REG_8(bRm))
14188 {
14189 case 0:
14190 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14191 case 1:
14192 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
14193 case 2:
14194 return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
14195 case 3:
14196 return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
14197 case 4:
14198 {
14199 /**
14200 * @opdone
14201 * @opmaps grp3_f6
14202 * @opcode /4
14203 * @opflclass multiply
14204 */
14205 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
14206 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14207 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
14208 break;
14209 }
14210 case 5:
14211 {
14212 /**
14213 * @opdone
14214 * @opmaps grp3_f6
14215 * @opcode /5
14216 * @opflclass multiply
14217 */
14218 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
14219 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14220 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
14221 break;
14222 }
14223 case 6:
14224 {
14225 /**
14226 * @opdone
14227 * @opmaps grp3_f6
14228 * @opcode /6
14229 * @opflclass division
14230 */
14231 IEMOP_MNEMONIC(div_Eb, "div Eb");
14232 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14233 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
14234 break;
14235 }
14236 case 7:
14237 {
14238 /**
14239 * @opdone
14240 * @opmaps grp3_f6
14241 * @opcode /7
14242 * @opflclass division
14243 */
14244 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
14245 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14246 IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
14247 break;
14248 }
14249 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14250 }
14251}
14252
14253
14254/**
14255 * @opmaps grp3_f7
14256 * @opcode /0
14257 * @opflclass logical
14258 */
14259FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
14260{
14261 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
14262 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
14263
14264 if (IEM_IS_MODRM_REG_MODE(bRm))
14265 {
14266 /* register access */
14267 switch (pVCpu->iem.s.enmEffOpSize)
14268 {
14269 case IEMMODE_16BIT:
14270 IEM_MC_BEGIN(3, 0, 0, 0);
14271 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14273 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
14274 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
14275 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14276 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14277 IEM_MC_REF_EFLAGS(pEFlags);
14278 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14279 IEM_MC_ADVANCE_RIP_AND_FINISH();
14280 IEM_MC_END();
14281 break;
14282
14283 case IEMMODE_32BIT:
14284 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
14285 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14287 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
14288 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
14289 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14290 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14291 IEM_MC_REF_EFLAGS(pEFlags);
14292 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14293 /* No clearing the high dword here - test doesn't write back the result. */
14294 IEM_MC_ADVANCE_RIP_AND_FINISH();
14295 IEM_MC_END();
14296 break;
14297
14298 case IEMMODE_64BIT:
14299 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
14300 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14302 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
14303 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
14304 IEM_MC_ARG(uint32_t *, pEFlags, 2);
14305 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
14306 IEM_MC_REF_EFLAGS(pEFlags);
14307 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14308 IEM_MC_ADVANCE_RIP_AND_FINISH();
14309 IEM_MC_END();
14310 break;
14311
14312 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14313 }
14314 }
14315 else
14316 {
14317 /* memory access. */
14318 switch (pVCpu->iem.s.enmEffOpSize)
14319 {
14320 case IEMMODE_16BIT:
14321 IEM_MC_BEGIN(3, 3, 0, 0);
14322 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14323 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
14324
14325 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
14326 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14327
14328 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14329 IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
14330 IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14331
14332 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
14333 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14334 IEM_MC_FETCH_EFLAGS(EFlags);
14335 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
14336
14337 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14338 IEM_MC_COMMIT_EFLAGS(EFlags);
14339 IEM_MC_ADVANCE_RIP_AND_FINISH();
14340 IEM_MC_END();
14341 break;
14342
14343 case IEMMODE_32BIT:
14344 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
14345 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14346 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14347
14348 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
14349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14350
14351 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14352 IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
14353 IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14354
14355 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
14356 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14357 IEM_MC_FETCH_EFLAGS(EFlags);
14358 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
14359
14360 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14361 IEM_MC_COMMIT_EFLAGS(EFlags);
14362 IEM_MC_ADVANCE_RIP_AND_FINISH();
14363 IEM_MC_END();
14364 break;
14365
14366 case IEMMODE_64BIT:
14367 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
14368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
14369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
14370
14371 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
14372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14373
14374 IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
14375 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
14376 IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
14377
14378 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
14379 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
14380 IEM_MC_FETCH_EFLAGS(EFlags);
14381 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
14382
14383 IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
14384 IEM_MC_COMMIT_EFLAGS(EFlags);
14385 IEM_MC_ADVANCE_RIP_AND_FINISH();
14386 IEM_MC_END();
14387 break;
14388
14389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14390 }
14391 }
14392}
14393
14394
14395/**
14396 * @opmaps grp3_f7
14397 * @opcode /2
14398 * @opflclass unchanged
14399 */
14400FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
14401{
14402/** @todo does not modify EFLAGS */
14403 IEMOP_MNEMONIC(not_Ev, "not Ev");
14404 IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
14405 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
14406}
14407
14408
14409/**
14410 * @opmaps grp3_f7
14411 * @opcode /3
14412 * @opflclass arithmetic
14413 */
14414FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
14415{
14416 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
14417 IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
14418 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
14419}
14420
14421
14422/**
14423 * @opmaps grp3_f7
14424 * @opcode /4
14425 * @opflclass multiply
14426 */
14427FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
14428{
14429 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
14430 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14431 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
14432}
14433
14434
14435/**
14436 * @opmaps grp3_f7
14437 * @opcode /5
14438 * @opflclass multiply
14439 */
14440FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
14441{
14442 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
14443 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14444 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
14445}
14446
14447
14448/**
14449 * @opmaps grp3_f7
14450 * @opcode /6
14451 * @opflclass division
14452 */
14453FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
14454{
14455 IEMOP_MNEMONIC(div_Ev, "div Ev");
14456 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14457 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
14458}
14459
14460
14461/**
14462 * @opmaps grp3_f7
14463 * @opcode /7
14464 * @opflclass division
14465 */
14466FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
14467{
14468 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
14469 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14470 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
14471}
14472
14473
14474/**
14475 * @opcode 0xf7
14476 */
14477FNIEMOP_DEF(iemOp_Grp3_Ev)
14478{
14479 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14480 switch (IEM_GET_MODRM_REG_8(bRm))
14481 {
14482 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14483 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14484 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14485 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14486 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14487 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14488 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14489 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14490 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14491 }
14492}
14493
14494
14495/**
14496 * @opcode 0xf8
14497 * @opflmodify cf
14498 * @opflclear cf
14499 */
14500FNIEMOP_DEF(iemOp_clc)
14501{
14502 IEMOP_MNEMONIC(clc, "clc");
14503 IEM_MC_BEGIN(0, 0, 0, 0);
14504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14505 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
14506 IEM_MC_ADVANCE_RIP_AND_FINISH();
14507 IEM_MC_END();
14508}
14509
14510
14511/**
14512 * @opcode 0xf9
14513 * @opflmodify cf
14514 * @opflset cf
14515 */
14516FNIEMOP_DEF(iemOp_stc)
14517{
14518 IEMOP_MNEMONIC(stc, "stc");
14519 IEM_MC_BEGIN(0, 0, 0, 0);
14520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14521 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
14522 IEM_MC_ADVANCE_RIP_AND_FINISH();
14523 IEM_MC_END();
14524}
14525
14526
14527/**
14528 * @opcode 0xfa
14529 * @opfltest iopl,vm
14530 * @opflmodify if,vif
14531 */
14532FNIEMOP_DEF(iemOp_cli)
14533{
14534 IEMOP_MNEMONIC(cli, "cli");
14535 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14536 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
14537}
14538
14539
14540/**
14541 * @opcode 0xfb
14542 * @opfltest iopl,vm
14543 * @opflmodify if,vif
14544 */
14545FNIEMOP_DEF(iemOp_sti)
14546{
14547 IEMOP_MNEMONIC(sti, "sti");
14548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14549 IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
14550 | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
14551}
14552
14553
14554/**
14555 * @opcode 0xfc
14556 * @opflmodify df
14557 * @opflclear df
14558 */
14559FNIEMOP_DEF(iemOp_cld)
14560{
14561 IEMOP_MNEMONIC(cld, "cld");
14562 IEM_MC_BEGIN(0, 0, 0, 0);
14563 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14564 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
14565 IEM_MC_ADVANCE_RIP_AND_FINISH();
14566 IEM_MC_END();
14567}
14568
14569
14570/**
14571 * @opcode 0xfd
14572 * @opflmodify df
14573 * @opflset df
14574 */
14575FNIEMOP_DEF(iemOp_std)
14576{
14577 IEMOP_MNEMONIC(std, "std");
14578 IEM_MC_BEGIN(0, 0, 0, 0);
14579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14580 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
14581 IEM_MC_ADVANCE_RIP_AND_FINISH();
14582 IEM_MC_END();
14583}
14584
14585
14586/**
14587 * @opmaps grp4
14588 * @opcode /0
14589 * @opflclass incdec
14590 */
14591FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
14592{
14593 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
14594 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
14595}
14596
14597
14598/**
14599 * @opmaps grp4
14600 * @opcode /1
14601 * @opflclass incdec
14602 */
14603FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
14604{
14605 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
14606 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
14607}
14608
14609
14610/**
14611 * @opcode 0xfe
14612 */
14613FNIEMOP_DEF(iemOp_Grp4)
14614{
14615 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14616 switch (IEM_GET_MODRM_REG_8(bRm))
14617 {
14618 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
14619 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
14620 default:
14621 /** @todo is the eff-addr decoded? */
14622 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
14623 IEMOP_RAISE_INVALID_OPCODE_RET();
14624 }
14625}
14626
14627/**
14628 * @opmaps grp5
14629 * @opcode /0
14630 * @opflclass incdec
14631 */
14632FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
14633{
14634 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
14635 IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
14636 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
14637}
14638
14639
14640/**
14641 * @opmaps grp5
14642 * @opcode /1
14643 * @opflclass incdec
14644 */
14645FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
14646{
14647 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
14648 IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
14649 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
14650}
14651
14652
14653/**
14654 * Opcode 0xff /2.
14655 * @param bRm The RM byte.
14656 */
14657FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
14658{
14659 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
14660 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14661
14662 if (IEM_IS_MODRM_REG_MODE(bRm))
14663 {
14664 /* The new RIP is taken from a register. */
14665 switch (pVCpu->iem.s.enmEffOpSize)
14666 {
14667 case IEMMODE_16BIT:
14668 IEM_MC_BEGIN(1, 0, 0, 0);
14669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14670 IEM_MC_ARG(uint16_t, u16Target, 0);
14671 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14672 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
14673 IEM_MC_END();
14674 break;
14675
14676 case IEMMODE_32BIT:
14677 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
14678 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14679 IEM_MC_ARG(uint32_t, u32Target, 0);
14680 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14681 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
14682 IEM_MC_END();
14683 break;
14684
14685 case IEMMODE_64BIT:
14686 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
14687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14688 IEM_MC_ARG(uint64_t, u64Target, 0);
14689 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14690 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
14691 IEM_MC_END();
14692 break;
14693
14694 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14695 }
14696 }
14697 else
14698 {
14699 /* The new RIP is taken from a register. */
14700 switch (pVCpu->iem.s.enmEffOpSize)
14701 {
14702 case IEMMODE_16BIT:
14703 IEM_MC_BEGIN(1, 1, 0, 0);
14704 IEM_MC_ARG(uint16_t, u16Target, 0);
14705 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14707 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14708 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14709 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
14710 IEM_MC_END();
14711 break;
14712
14713 case IEMMODE_32BIT:
14714 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
14715 IEM_MC_ARG(uint32_t, u32Target, 0);
14716 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14719 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14720 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
14721 IEM_MC_END();
14722 break;
14723
14724 case IEMMODE_64BIT:
14725 IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
14726 IEM_MC_ARG(uint64_t, u64Target, 0);
14727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14730 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14731 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
14732 IEM_MC_END();
14733 break;
14734
14735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14736 }
14737 }
14738}
14739
/**
 * Common body for CALLF/JMPF Ep (grp5 \/3 and \/5): loads a far pointer
 * (selector:offset) from memory and hands it to the given C implementation.
 *
 * Register operands are invalid for these encodings and raise \#UD.  In
 * 64-bit mode the default operand size is 32-bit; only Intel CPUs honour a
 * REX.W prefix here (AMD forces 32-bit, see the 64-bit case assertion).
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnCImpl       The C implementation to call (iemCImpl_callf or
 *                          iemCImpl_FarJmp).
 * @param   a_fCImplExtra   Extra IEM_CIMPL_F_XXX flags (e.g. BRANCH_STACK
 *                          for the call variant).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14808
14809
14810/**
14811 * Opcode 0xff /3.
14812 * @param bRm The RM byte.
14813 */
14814FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
14815{
14816 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
14817 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
14818}
14819
14820
14821/**
14822 * Opcode 0xff /4.
14823 * @param bRm The RM byte.
14824 */
14825FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
14826{
14827 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
14828 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14829
14830 if (IEM_IS_MODRM_REG_MODE(bRm))
14831 {
14832 /* The new RIP is taken from a register. */
14833 switch (pVCpu->iem.s.enmEffOpSize)
14834 {
14835 case IEMMODE_16BIT:
14836 IEM_MC_BEGIN(0, 1, 0, 0);
14837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14838 IEM_MC_LOCAL(uint16_t, u16Target);
14839 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14840 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14841 IEM_MC_END();
14842 break;
14843
14844 case IEMMODE_32BIT:
14845 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
14846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14847 IEM_MC_LOCAL(uint32_t, u32Target);
14848 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14849 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14850 IEM_MC_END();
14851 break;
14852
14853 case IEMMODE_64BIT:
14854 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
14855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14856 IEM_MC_LOCAL(uint64_t, u64Target);
14857 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14858 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14859 IEM_MC_END();
14860 break;
14861
14862 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14863 }
14864 }
14865 else
14866 {
14867 /* The new RIP is taken from a memory location. */
14868 switch (pVCpu->iem.s.enmEffOpSize)
14869 {
14870 case IEMMODE_16BIT:
14871 IEM_MC_BEGIN(0, 2, 0, 0);
14872 IEM_MC_LOCAL(uint16_t, u16Target);
14873 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14876 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14877 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14878 IEM_MC_END();
14879 break;
14880
14881 case IEMMODE_32BIT:
14882 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
14883 IEM_MC_LOCAL(uint32_t, u32Target);
14884 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14885 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14887 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14888 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14889 IEM_MC_END();
14890 break;
14891
14892 case IEMMODE_64BIT:
14893 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14894 IEM_MC_LOCAL(uint64_t, u64Target);
14895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14898 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14899 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14900 IEM_MC_END();
14901 break;
14902
14903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14904 }
14905 }
14906}
14907
14908
14909/**
14910 * Opcode 0xff /5.
14911 * @param bRm The RM byte.
14912 */
14913FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
14914{
14915 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
14916 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
14917}
14918
14919
14920/**
14921 * Opcode 0xff /6.
14922 * @param bRm The RM byte.
14923 */
14924FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
14925{
14926 IEMOP_MNEMONIC(push_Ev, "push Ev");
14927
14928 /* Registers are handled by a common worker. */
14929 if (IEM_IS_MODRM_REG_MODE(bRm))
14930 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
14931
14932 /* Memory we do here. */
14933 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
14934 switch (pVCpu->iem.s.enmEffOpSize)
14935 {
14936 case IEMMODE_16BIT:
14937 IEM_MC_BEGIN(0, 2, 0, 0);
14938 IEM_MC_LOCAL(uint16_t, u16Src);
14939 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14940 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14942 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14943 IEM_MC_PUSH_U16(u16Src);
14944 IEM_MC_ADVANCE_RIP_AND_FINISH();
14945 IEM_MC_END();
14946 break;
14947
14948 case IEMMODE_32BIT:
14949 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
14950 IEM_MC_LOCAL(uint32_t, u32Src);
14951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14954 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14955 IEM_MC_PUSH_U32(u32Src);
14956 IEM_MC_ADVANCE_RIP_AND_FINISH();
14957 IEM_MC_END();
14958 break;
14959
14960 case IEMMODE_64BIT:
14961 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14962 IEM_MC_LOCAL(uint64_t, u64Src);
14963 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14964 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14965 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14966 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14967 IEM_MC_PUSH_U64(u64Src);
14968 IEM_MC_ADVANCE_RIP_AND_FINISH();
14969 IEM_MC_END();
14970 break;
14971
14972 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14973 }
14974}
14975
14976
14977/**
14978 * @opcode 0xff
14979 */
14980FNIEMOP_DEF(iemOp_Grp5)
14981{
14982 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14983 switch (IEM_GET_MODRM_REG_8(bRm))
14984 {
14985 case 0:
14986 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14987 case 1:
14988 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14989 case 2:
14990 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14991 case 3:
14992 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14993 case 4:
14994 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14995 case 5:
14996 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14997 case 6:
14998 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14999 case 7:
15000 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15001 IEMOP_RAISE_INVALID_OPCODE_RET();
15002 }
15003 AssertFailedReturn(VERR_IEM_IPE_3);
15004}
15005
15006
15007
/**
 * The one byte opcode dispatch table, indexed by the opcode byte
 * (0x00..0xff).  Prefix bytes (segment overrides, LOCK, REP, operand/address
 * size) and escape bytes (0x0f, 0xd8..0xdf) also dispatch through here.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
15075
15076
15077/** @} */
15078
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette