VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 103590

Last change on this file: revision 103590, checked in by vboxsync, 14 months ago

VMM/IEM: Native translation of IEM_MC_FETCH_FSW() body (untested), bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 557.5 KB
Line 
/* $Id: IEMAllInstOneByte.cpp.h 103590 2024-02-27 16:41:11Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
34/* Instruction group definitions: */
35
36/** @defgroup og_gen General
37 * @{ */
38 /** @defgroup og_gen_arith Arithmetic
39 * @{ */
40 /** @defgroup og_gen_arith_bin Binary numbers */
41 /** @defgroup og_gen_arith_dec Decimal numbers */
42 /** @} */
43/** @} */
44
45/** @defgroup og_stack Stack
46 * @{ */
47 /** @defgroup og_stack_sreg Segment registers */
48/** @} */
49
50/** @defgroup og_prefix Prefixes */
51/** @defgroup og_escapes Escape bytes */
52
53
54
55/** @name One byte opcodes.
56 * @{
57 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination (read-modify-write).
 *
 * @param   a_fnNormalU8    Assembly worker for the plain encoding.
 * @param   a_fnLockedU8    Assembly worker for the LOCK prefixed encoding.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8, a_fnLockedU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK prefixed variant: atomic mapping + the locked worker. */ \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
136
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands.  The destination is only read, so a LOCK prefix is rejected via
 * IEMOP_RAISE_INVALID_LOCK_PREFIX_RET().
 *
 * @param   a_fnNormalU8    Assembly worker for the instruction.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,      0); \
            IEM_MC_ARG(uint8_t,         u8Src,       1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            /* LOCK is architecturally invalid on a read-only destination. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
198
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination (Gb, Eb form).  The register destination means no LOCK prefix
 * is allowed, so the memory operand is simply fetched (no mapping).
 *
 * @param   a_fnNormalU8    Assembly worker for the instruction.
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
247
248
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Note! This macro deliberately ends inside the still-open LOCK-prefix else
 *       branch; it must always be paired with IEMOP_BODY_BINARY_rm_rv_LOCKED,
 *       which supplies the locked code paths and the closing braces.
 *
 * @param   a_fnNormalU16   Worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */

/**
 * Completes IEMOP_BODY_BINARY_rm_rv_RW: supplies the LOCK prefixed (atomic)
 * code paths and the closing braces for the if/else structure left open by
 * that macro.  Never use this on its own.
 *
 * @param   a_fnLockedU16   Locked worker for the 16-bit operand size.
 * @param   a_fnLockedU32   Locked worker for the 32-bit operand size.
 * @param   a_fnLockedU64   Locked worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
467
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.  The destination is only read, so a
 * LOCK prefix is rejected via IEMOP_RAISE_INVALID_LOCK_PREFIX_RET().
 *
 * @param   a_fnNormalU16   Worker for the 16-bit operand size.
 * @param   a_fnNormalU32   Worker for the 32-bit operand size.
 * @param   a_fnNormalU64   Worker for the 64-bit operand size.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,    0); \
                    IEM_MC_ARG(uint16_t,         u16Src,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,    0); \
                    IEM_MC_ARG(uint32_t,         u32Src,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,    0); \
                    IEM_MC_ARG(uint64_t,         u64Src,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK is architecturally invalid on a read-only destination. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
617
618
/**
 * Body for instructions like ADD, AND, OR, ++ working on AL with a byte
 * immediate.
 *
 * Note! No trailing semicolon after IEM_MC_END() - the caller supplies it
 *       (see the invocations, e.g. iemOp_add_Al_Ib).
 *
 * @param   a_fnNormalU8    Assembly worker for the instruction.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
638
/**
 * Body for instructions like ADD, AND, OR, ++ working on AX/EAX/RAX with a
 * word/dword immediate (sign-extended to 64 bits for the qword case).
 *
 * NOTE(review): the cases carry no 'break' - each one appears to leave the
 * block via IEM_MC_ADVANCE_RIP_AND_FINISH() before falling through; kept
 * as-is, confirm against the IEM_MC_* expansions before changing.
 *
 * @param   a_fnNormalU16       Worker for the 16-bit operand size.
 * @param   a_fnNormalU32       Worker for the 32-bit operand size.
 * @param   a_fnNormalU64       Worker for the 64-bit operand size.
 * @param   a_fModifiesDstReg   Non-zero if the destination register is
 *                              written (clears the high dword in 32-bit mode).
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
705
706
707
708/* Instruction specification format - work in progress: */
709
710/**
711 * @opcode 0x00
712 * @opmnemonic add
713 * @op1 rm:Eb
714 * @op2 reg:Gb
715 * @opmaps one
716 * @openc ModR/M
717 * @opflclass arithmetic
718 * @ophints harmless ignores_op_sizes
719 * @opstats add_Eb_Gb
720 * @opgroup og_gen_arith_bin
721 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
722 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
723 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
724 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
725 */
726FNIEMOP_DEF(iemOp_add_Eb_Gb)
727{
728 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
729 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_add_u8, iemAImpl_add_u8_locked);
730}
731
732
733/**
734 * @opcode 0x01
735 * @opgroup og_gen_arith_bin
736 * @opflclass arithmetic
737 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
738 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
739 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
740 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
741 */
742FNIEMOP_DEF(iemOp_add_Ev_Gv)
743{
744 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
745 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
746 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
747}
748
749
750/**
751 * @opcode 0x02
752 * @opgroup og_gen_arith_bin
753 * @opflclass arithmetic
754 * @opcopytests iemOp_add_Eb_Gb
755 */
756FNIEMOP_DEF(iemOp_add_Gb_Eb)
757{
758 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
759 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
760}
761
762
763/**
764 * @opcode 0x03
765 * @opgroup og_gen_arith_bin
766 * @opflclass arithmetic
767 * @opcopytests iemOp_add_Ev_Gv
768 */
769FNIEMOP_DEF(iemOp_add_Gv_Ev)
770{
771 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
773 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
774}
775
776
777/**
778 * @opcode 0x04
779 * @opgroup og_gen_arith_bin
780 * @opflclass arithmetic
781 * @opcopytests iemOp_add_Eb_Gb
782 */
783FNIEMOP_DEF(iemOp_add_Al_Ib)
784{
785 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
786 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
787}
788
789
790/**
791 * @opcode 0x05
792 * @opgroup og_gen_arith_bin
793 * @opflclass arithmetic
794 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
795 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
796 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
797 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
798 */
799FNIEMOP_DEF(iemOp_add_eAX_Iz)
800{
801 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
802 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
803}
804
805
806/**
807 * @opcode 0x06
808 * @opgroup og_stack_sreg
809 */
810FNIEMOP_DEF(iemOp_push_ES)
811{
812 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
813 IEMOP_HLP_NO_64BIT();
814 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
815}
816
817
818/**
819 * @opcode 0x07
820 * @opgroup og_stack_sreg
821 */
822FNIEMOP_DEF(iemOp_pop_ES)
823{
824 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
825 IEMOP_HLP_NO_64BIT();
826 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
827 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
828 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
829 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
830 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
831 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
832 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES),
833 iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
834}
835
836
837/**
838 * @opcode 0x08
839 * @opgroup og_gen_arith_bin
840 * @opflclass logical
841 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
842 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
843 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
844 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
845 */
846FNIEMOP_DEF(iemOp_or_Eb_Gb)
847{
848 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
849 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
850 IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_or_u8, iemAImpl_or_u8_locked);
851}
852
853
854/*
855 * @opcode 0x09
856 * @opgroup og_gen_arith_bin
857 * @opflclass logical
858 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
859 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
860 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
861 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
862 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
863 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
864 * @note AF is documented as undefined, but both modern AMD and Intel CPUs clears it.
865 */
866FNIEMOP_DEF(iemOp_or_Ev_Gv)
867{
868 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
869 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
870 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
871 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
872}
873
874
875/**
876 * @opcode 0x0a
877 * @opgroup og_gen_arith_bin
878 * @opflclass logical
879 * @opcopytests iemOp_or_Eb_Gb
880 */
881FNIEMOP_DEF(iemOp_or_Gb_Eb)
882{
883 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
884 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
885 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
886}
887
888
889/**
890 * @opcode 0x0b
891 * @opgroup og_gen_arith_bin
892 * @opflclass logical
893 * @opcopytests iemOp_or_Ev_Gv
894 */
895FNIEMOP_DEF(iemOp_or_Gv_Ev)
896{
897 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
898 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
900 IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
901}
902
903
904/**
905 * @opcode 0x0c
906 * @opgroup og_gen_arith_bin
907 * @opflclass logical
908 * @opcopytests iemOp_or_Eb_Gb
909 */
910FNIEMOP_DEF(iemOp_or_Al_Ib)
911{
912 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
913 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
914 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
915}
916
917
918/**
919 * @opcode 0x0d
920 * @opgroup og_gen_arith_bin
921 * @opflclass logical
922 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
923 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
924 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
925 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
926 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
927 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
928 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
929 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz: operand size selects the 16/32/64-bit worker; AF undefined after OR. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1);
}
936
937
938/**
939 * @opcode 0x0e
940 * @opgroup og_stack_sreg
941 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS: invalid in 64-bit mode (raises #UD there); uses the common sreg-push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
948
949
950/**
951 * @opcode 0x0f
952 * @opmnemonic EscTwo0f
953 * @openc two0f
954 * @opdisenum OP_2B_ESC
955 * @ophints harmless
956 * @opgroup og_escapes
957 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The two-byte map holds four entries per opcode byte, selected by the
           current prefix index (idxPrefix). */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
995
996/**
997 * @opcode 0x10
998 * @opgroup og_gen_arith_bin
999 * @opflclass arithmetic_carry
1000 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1001 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1002 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1003 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1004 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1005 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: memory destination possible, so LOCK is allowed; plain + locked workers. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_adc_u8, iemAImpl_adc_u8_locked);
}
1011
1012
1013/**
1014 * @opcode 0x11
1015 * @opgroup og_gen_arith_bin
1016 * @opflclass arithmetic_carry
1017 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1018 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1019 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1020 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1021 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1022 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: the RW body handles the non-locked path, the second macro the locked one. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1029
1030
1031/**
1032 * @opcode 0x12
1033 * @opgroup og_gen_arith_bin
1034 * @opflclass arithmetic_carry
1035 * @opcopytests iemOp_adc_Eb_Gb
1036 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: register destination, so no LOCK hint and only the plain worker. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8);
}
1042
1043
1044/**
1045 * @opcode 0x13
1046 * @opgroup og_gen_arith_bin
1047 * @opflclass arithmetic_carry
1048 * @opcopytests iemOp_adc_Ev_Gv
1049 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: register destination, 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M before expanding the body */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0);
}
1056
1057
1058/**
1059 * @opcode 0x14
1060 * @opgroup og_gen_arith_bin
1061 * @opflclass arithmetic_carry
1062 * @opcopytests iemOp_adc_Eb_Gb
1063 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: fixed AL destination with an 8-bit immediate. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8);
}
1069
1070
1071/**
1072 * @opcode 0x15
1073 * @opgroup og_gen_arith_bin
1074 * @opflclass arithmetic_carry
1075 * @opcopytests iemOp_adc_Ev_Gv
1076 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: operand size selects the 16/32/64-bit worker. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1);
}
1082
1083
1084/**
1085 * @opcode 0x16
1086 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode; uses the common sreg-push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1093
1094
1095/**
1096 * @opcode 0x17
1097 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: invalid in 64-bit mode; inhibits interrupts on the following instruction
       (DISOPTYPE_INHIBIT_IRQS / IEM_CIMPL_F_INHIBIT_SHADOW).  The mask below lists the
       guest registers the C implementation may modify: rSP plus all SS descriptor fields. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1111
1112
1113/**
1114 * @opcode 0x18
1115 * @opgroup og_gen_arith_bin
1116 * @opflclass arithmetic_carry
1117 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: memory destination possible, so LOCK is allowed; plain + locked workers. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked);
}
1123
1124
1125/**
1126 * @opcode 0x19
1127 * @opgroup og_gen_arith_bin
1128 * @opflclass arithmetic_carry
1129 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: non-locked body first, then the locked variant. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1136
1137
1138/**
1139 * @opcode 0x1a
1140 * @opgroup og_gen_arith_bin
1141 * @opflclass arithmetic_carry
1142 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: register destination, plain worker only. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8);
}
1148
1149
1150/**
1151 * @opcode 0x1b
1152 * @opgroup og_gen_arith_bin
1153 * @opflclass arithmetic_carry
1154 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: register destination, 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M before expanding the body */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0);
}
1161
1162
1163/**
1164 * @opcode 0x1c
1165 * @opgroup og_gen_arith_bin
1166 * @opflclass arithmetic_carry
1167 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: fixed AL destination with an 8-bit immediate. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8);
}
1173
1174
1175/**
1176 * @opcode 0x1d
1177 * @opgroup og_gen_arith_bin
1178 * @opflclass arithmetic_carry
1179 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: operand size selects the 16/32/64-bit worker. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1);
}
1185
1186
1187/**
1188 * @opcode 0x1e
1189 * @opgroup og_stack_sreg
1190 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode; uses the common sreg-push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1197
1198
1199/**
1200 * @opcode 0x1f
1201 * @opgroup og_stack_sreg
1202 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: invalid in 64-bit mode.  The mask lists the guest registers the C
       implementation may modify: rSP plus all DS descriptor fields. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1216
1217
1218/**
1219 * @opcode 0x20
1220 * @opgroup og_gen_arith_bin
1221 * @opflclass logical
1222 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: memory destination possible, so LOCK is allowed; AF undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_and_u8, iemAImpl_and_u8_locked);
}
1229
1230
1231/**
1232 * @opcode 0x21
1233 * @opgroup og_gen_arith_bin
1234 * @opflclass logical
1235 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: non-locked body first, then the locked variant; AF undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1243
1244
1245/**
1246 * @opcode 0x22
1247 * @opgroup og_gen_arith_bin
1248 * @opflclass logical
1249 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: register destination, plain worker only; AF undefined after AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8);
}
1256
1257
1258/**
1259 * @opcode 0x23
1260 * @opgroup og_gen_arith_bin
1261 * @opflclass logical
1262 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: register destination, 16/32/64-bit workers; AF undefined after AND. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M before expanding the body */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0);
}
1270
1271
1272/**
1273 * @opcode 0x24
1274 * @opgroup og_gen_arith_bin
1275 * @opflclass logical
1276 */
1277FNIEMOP_DEF(iemOp_and_Al_Ib)
1278{
1279 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1280 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1281 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1282}
1283
1284
1285/**
1286 * @opcode 0x25
1287 * @opgroup og_gen_arith_bin
1288 * @opflclass logical
1289 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: operand size selects the 16/32/64-bit worker; AF undefined after AND. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1);
}
1296
1297
1298/**
1299 * @opcode 0x26
1300 * @opmnemonic SEG
1301 * @op1 ES
1302 * @opgroup og_prefix
1303 * @openc prefix
1304 * @opdisenum OP_SEG
1305 * @ophints harmless
1306 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* continue decoding with the prefix in effect */
}
1316
1317
1318/**
1319 * @opcode 0x27
1320 * @opfltest af,cf
1321 * @opflmodify cf,pf,af,zf,sf,of
1322 * @opflundef of
1323 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal-adjust AL after addition; invalid in 64-bit mode; OF is undefined. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1332
1333
1334/**
1335 * @opcode 0x28
1336 * @opgroup og_gen_arith_bin
1337 * @opflclass arithmetic
1338 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: memory destination possible, so LOCK is allowed; plain + locked workers. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_sub_u8, iemAImpl_sub_u8_locked);
}
1344
1345
1346/**
1347 * @opcode 0x29
1348 * @opgroup og_gen_arith_bin
1349 * @opflclass arithmetic
1350 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: non-locked body first, then the locked variant. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1357
1358
1359/**
1360 * @opcode 0x2a
1361 * @opgroup og_gen_arith_bin
1362 * @opflclass arithmetic
1363 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb: register destination, plain worker only. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8);
}
1369
1370
1371/**
1372 * @opcode 0x2b
1373 * @opgroup og_gen_arith_bin
1374 * @opflclass arithmetic
1375 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev: register destination, 16/32/64-bit workers. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); /* fetch ModR/M before expanding the body */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0);
}
1382
1383
1384/**
1385 * @opcode 0x2c
1386 * @opgroup og_gen_arith_bin
1387 * @opflclass arithmetic
1388 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib: fixed AL destination with an 8-bit immediate. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8);
}
1394
1395
1396/**
1397 * @opcode 0x2d
1398 * @opgroup og_gen_arith_bin
1399 * @opflclass arithmetic
1400 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz: operand size selects the 16/32/64-bit worker. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1);
}
1406
1407
1408/**
1409 * @opcode 0x2e
1410 * @opmnemonic SEG
1411 * @op1 CS
1412 * @opgroup og_prefix
1413 * @openc prefix
1414 * @opdisenum OP_SEG
1415 * @ophints harmless
1416 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* continue decoding with the prefix in effect */
}
1426
1427
1428/**
1429 * @opcode 0x2f
1430 * @opfltest af,cf
1431 * @opflmodify cf,pf,af,zf,sf,of
1432 * @opflundef of
1433 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decimal-adjust AL after subtraction; invalid in 64-bit mode; OF is undefined. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1442
1443
1444/**
1445 * @opcode 0x30
1446 * @opgroup og_gen_arith_bin
1447 * @opflclass logical
1448 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb: memory destination possible, so LOCK is allowed; AF undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RW(iemAImpl_xor_u8, iemAImpl_xor_u8_locked);
}
1455
1456
1457/**
1458 * @opcode 0x31
1459 * @opgroup og_gen_arith_bin
1460 * @opflclass logical
1461 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv: non-locked body first, then the locked variant; AF undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1469
1470
1471/**
1472 * @opcode 0x32
1473 * @opgroup og_gen_arith_bin
1474 * @opflclass logical
1475 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb: register destination, plain worker only; AF undefined after XOR. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8);
}
1482
1483
1484/**
1485 * @opcode 0x33
1486 * @opgroup og_gen_arith_bin
1487 * @opflclass logical
1488 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev: register destination; AF undefined after XOR. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Deal with special case of 'xor rN, rN' which sets rN to zero and has a known EFLAGS outcome.
     */
    /* The condition checks mod == 3 (register form) with reg == r/m, i.e. both
       operands name the same register; the REX check makes sure the R and B
       extension bits agree so it really is the same register in 64-bit mode. */
    if (   (bRm >> X86_MODRM_REG_SHIFT) == ((bRm & X86_MODRM_RM_MASK) | (X86_MOD_REG << X86_MODRM_REG_SHIFT))
        && pVCpu->iem.s.uRexReg == pVCpu->iem.s.uRexB)
    {
        /* In each case: store zero, clear all status flags, then set PF and ZF
           (the fixed EFLAGS result of x ^ x).  The MC blocks finish the
           instruction themselves, so the generic body below is not reached. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), 0);
                IEM_MC_LOCAL(uint32_t, fEFlags);
                IEM_MC_FETCH_EFLAGS(fEFlags);
                IEM_MC_AND_LOCAL_U32(fEFlags, ~(uint32_t)X86_EFL_STATUS_BITS);
                IEM_MC_OR_LOCAL_U32(fEFlags, X86_EFL_PF | X86_EFL_ZF);
                IEM_MC_COMMIT_EFLAGS(fEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    /* Generic path for all other operand combinations. */
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0);
}
1548
1549
1550/**
1551 * @opcode 0x34
1552 * @opgroup og_gen_arith_bin
1553 * @opflclass logical
1554 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib: fixed AL destination with an 8-bit immediate; AF undefined after XOR. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8);
}
1561
1562
1563/**
1564 * @opcode 0x35
1565 * @opgroup og_gen_arith_bin
1566 * @opflclass logical
1567 */
1568FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1569{
1570 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1571 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1572 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1573}
1574
1575
1576/**
1577 * @opcode 0x36
1578 * @opmnemonic SEG
1579 * @op1 SS
1580 * @opgroup og_prefix
1581 * @openc prefix
1582 * @opdisenum OP_SEG
1583 * @ophints harmless
1584 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* continue decoding with the prefix in effect */
}
1594
1595
1596/**
1597 * @opcode 0x37
1598 * @opfltest af
1599 * @opflmodify cf,pf,af,zf,sf,of
1600 * @opflundef pf,zf,sf,of
1601 * @opgroup og_gen_arith_dec
1602 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1603 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1604 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1605 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1606 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1607 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1608 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1609 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1610 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1611 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1612 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1613 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1614 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1615 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1616 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1617 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1618 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1619 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1620 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1621 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1622 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1623 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1624 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1625 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1626 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1627 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1628 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1629 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1630 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1631 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1632 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1633 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: ASCII-adjust AL after addition; invalid in 64-bit mode; OF is undefined. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1643
1644
1645/**
1646 * @opcode 0x38
1647 * @opflclass arithmetic
1648 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb: read-only body - CMP never writes the destination, so no locked worker. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
}
1654
1655
1656/**
1657 * @opcode 0x39
1658 * @opflclass arithmetic
1659 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv: read-only body - CMP never writes the destination, so no locked worker. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
1665
1666
1667/**
1668 * @opcode 0x3a
1669 * @opflclass arithmetic
1670 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb: register "destination" form; only EFLAGS are updated. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8);
}
1676
1677
1678/**
1679 * @opcode 0x3b
1680 * @opflclass arithmetic
1681 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev: the 0 argument (vs 1 for or/adc/etc.) tells the body not to
       write back the result - CMP only updates EFLAGS. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_BODY_BINARY_rv_rm(bRm, iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0);
}
1688
1689
1690/**
1691 * @opcode 0x3c
1692 * @opflclass arithmetic
1693 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib: fixed AL compared against an 8-bit immediate. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8);
}
1699
1700
1701/**
1702 * @opcode 0x3d
1703 * @opflclass arithmetic
1704 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz: the trailing 0 (vs 1 for or/adc/etc.) suppresses the result write-back. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0);
}
1710
1711
1712/**
1713 * @opcode 0x3e
1714 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record it and decode the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* continue decoding with the prefix in effect */
}
1724
1725
1726/**
1727 * @opcode 0x3f
1728 * @opfltest af
1729 * @opflmodify cf,pf,af,zf,sf,of
1730 * @opflundef pf,zf,sf,of
1731 * @opgroup og_gen_arith_dec
1732 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1733 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1734 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1735 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1736 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1737 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1738 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1739 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1740 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1741 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1742 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1743 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1745 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1748 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1749 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1750 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1751 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1752 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1753 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1754 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1755 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1756 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1757 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1758 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1759 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1760 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1761 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1762 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1763 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1764 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1765 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1766 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1767 */
1768FNIEMOP_DEF(iemOp_aas)
1769{
1770 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1771 IEMOP_HLP_NO_64BIT();
1772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1773 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1774
1775 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1776}
1777
1778
1779/**
1780 * Common 'inc/dec register' helper.
1781 *
1782 * Not for 64-bit code, only for what became the rex prefixes.
1783 */
1784#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1785 switch (pVCpu->iem.s.enmEffOpSize) \
1786 { \
1787 case IEMMODE_16BIT: \
1788 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1789 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1790 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1791 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1792 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1793 IEM_MC_REF_EFLAGS(pEFlags); \
1794 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1795 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1796 IEM_MC_END(); \
1797 break; \
1798 \
1799 case IEMMODE_32BIT: \
1800 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1802 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1803 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1804 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1805 IEM_MC_REF_EFLAGS(pEFlags); \
1806 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1807 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
1808 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1809 IEM_MC_END(); \
1810 break; \
1811 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1812 } \
1813 (void)0
1814
1815/**
1816 * @opcode 0x40
1817 * @opflclass incdec
1818 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* Plain REX (0x40): no extension bits set; just flag it and keep decoding. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eAX. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1836
1837
1838/**
1839 * @opcode 0x41
1840 * @opflclass incdec
1841 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.B (0x41): extends the ModR/M r/m, SIB base or opcode reg field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eCX. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1860
1861
1862/**
1863 * @opcode 0x42
1864 * @opflclass incdec
1865 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.X (0x42): extends the SIB index field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eDX. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1884
1885
1886
1887/**
1888 * @opcode 0x43
1889 * @opflclass incdec
1890 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.BX (0x43): both the B and X extension bits. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eBX. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1910
1911
1912/**
1913 * @opcode 0x44
1914 * @opflclass incdec
1915 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        /* REX.R (0x44): extends the ModR/M reg field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC eSP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1934
1935
/**
 * @opcode 0x45
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R supplies bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexB   = 1 << 3; /* REX.B supplies bit 3 of the ModR/M r/m / opcode register. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'inc eBP'. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1959
1960
/**
 * @opcode 0x46
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R supplies bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X supplies bit 3 of the SIB index register. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'inc eSI'. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1984
1985
/**
 * @opcode 0x47
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R supplies bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B supplies bit 3 of the ModR/M r/m / opcode register. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X supplies bit 3 of the SIB index register. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'inc eDI'. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2010
2011
/**
 * @opcode 0x48
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size; recalculate it. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eAX'. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2034
2035
/**
 * @opcode 0x49
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B supplies bit 3 of the ModR/M r/m / opcode register. */
        iemRecalEffOpSize(pVCpu);    /* REX.W changes the effective operand size; recalculate it. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eCX'. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2059
2060
/**
 * @opcode 0x4a
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X supplies bit 3 of the SIB index register. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size; recalculate it. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eDX'. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2084
2085
/**
 * @opcode 0x4b
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B supplies bit 3 of the ModR/M r/m / opcode register. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X supplies bit 3 of the SIB index register. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size; recalculate it. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eBX'. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2110
2111
/**
 * @opcode 0x4c
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R supplies bit 3 of the ModR/M reg field. */
        iemRecalEffOpSize(pVCpu);      /* REX.W changes the effective operand size; recalculate it. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eSP'. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2135
2136
/**
 * @opcode 0x4d
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R supplies bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexB   = 1 << 3; /* REX.B supplies bit 3 of the ModR/M r/m / opcode register. */
        iemRecalEffOpSize(pVCpu);      /* REX.W changes the effective operand size; recalculate it. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eBP'. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2161
2162
/**
 * @opcode 0x4e
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R supplies bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X supplies bit 3 of the SIB index register. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size; recalculate it. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eSI'. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2187
2188
/**
 * @opcode 0x4f
 * @opflclass incdec
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R supplies bit 3 of the ModR/M reg field. */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B supplies bit 3 of the ModR/M r/m / opcode register. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X supplies bit 3 of the SIB index register. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size; recalculate it. */

        /* Fetch and dispatch the opcode byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode this is 'dec eDI'. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2214
2215
/**
 * Common 'push register' helper.
 *
 * Emits the MC block for PUSH with a general register operand (opcodes
 * 0x50-0x57), selecting the 16/32/64-bit variant from the effective
 * operand size.
 *
 * @param   iReg    The register being pushed (X86_GREG_XXX); in 64-bit
 *                  mode the REX.B bit is merged in below.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B extends the register index (R8-R15). */
        /* In 64-bit mode the default operand size for pushes is 64-bit; the 66h
           prefix selects 16-bit.  There is no 32-bit push in 64-bit mode. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2263
2264
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* In 64-bit mode REX.B turns this into 'push r8' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2273
2274
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* In 64-bit mode REX.B turns this into 'push r9' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2283
2284
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* In 64-bit mode REX.B turns this into 'push r10' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2293
2294
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* In 64-bit mode REX.B turns this into 'push r11' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2303
2304
/**
 * @opcode 0x54
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) != IEMTARGETCPU_8086)
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);

    /* 8086 works differently wrt to 'push sp' compared to 80186 and later:
       it stores the already decremented SP value (SP - 2) on the stack. */
    IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Value);
    IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
    IEM_MC_SUB_LOCAL_U16(u16Value, 2);
    IEM_MC_PUSH_U16(u16Value);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
2324
2325
/**
 * @opcode 0x55
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* In 64-bit mode REX.B turns this into 'push r13' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2334
2335
/**
 * @opcode 0x56
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* In 64-bit mode REX.B turns this into 'push r14' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2344
2345
/**
 * @opcode 0x57
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* In 64-bit mode REX.B turns this into 'push r15' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2354
2355
/**
 * Common 'pop register' helper.
 *
 * Emits the MC block for POP with a general register operand (opcodes
 * 0x58-0x5f), selecting the 16/32/64-bit variant from the effective
 * operand size.
 *
 * @param   iReg    The register being popped into (X86_GREG_XXX); in
 *                  64-bit mode the REX.B bit is merged in below.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B extends the register index (R8-R15). */
        /* In 64-bit mode the default operand size for pops is 64-bit; the 66h
           prefix selects 16-bit.  There is no 32-bit pop in 64-bit mode. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U16(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U32(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_POP_GREG_U64(iReg);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2397
2398
/**
 * @opcode 0x58
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* In 64-bit mode REX.B turns this into 'pop r8' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2407
2408
/**
 * @opcode 0x59
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* In 64-bit mode REX.B turns this into 'pop r9' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2417
2418
/**
 * @opcode 0x5a
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* In 64-bit mode REX.B turns this into 'pop r10' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2427
2428
/**
 * @opcode 0x5b
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* In 64-bit mode REX.B turns this into 'pop r11' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2437
2438
/**
 * @opcode 0x5c
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    /* In 64-bit mode REX.B turns this into 'pop r12' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
}
2447
2448
/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* In 64-bit mode REX.B turns this into 'pop r13' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2457
2458
/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* In 64-bit mode REX.B turns this into 'pop r14' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2467
2468
/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* In 64-bit mode REX.B turns this into 'pop r15' (handled by the helper). */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2477
2478
/**
 * @opcode 0x60
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* Defer to C implementation; only xSP is listed as modified since PUSHA
       writes the other registers to the stack without changing them. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2492
2493
/**
 * @opcode 0x61
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* Defer to C implementation; all eight GPRs are listed as modified
           since POPA loads them from the stack (and adjusts xSP). */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                          RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                      RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    /* In 64-bit mode 0x61 is the MVEX prefix (Knights Corner), which we do not support. */
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2531
2532
/**
 * @opcode 0x62
 * @opmnemonic bound
 * @op1 Gv_RO
 * @op2 Ma
 * @opmincpu 80186
 * @ophints harmless x86_invalid_64
 * @optest op1=0 op2=0 ->
 * @optest op1=1 op2=0 -> value.xcpt=5
 * @optest o16 / op1=0xffff op2=0x0000fffe ->
 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+2]. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+4]. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX prefix decoding: consume the remaining two payload bytes, then give up. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2672
2673
/**
 * @opcode 0x63
 * @opflmodify zf
 * @note non-64-bit modes.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination: operate directly on the r/m register. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map it read-write, apply, then commit both
           the memory and the flags. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *,       pu16Dst,          0);
        IEM_MC_ARG(uint16_t,         u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags,  2);
        IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,        bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2726
2727
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend the 32-bit source into the
             * 64-bit destination register.
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory: fetch 32 bits and
             * sign-extend to 64.
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        /* The non-REX.W form (plain 32/16-bit move per AMD docs) is not implemented yet. */
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2775
2776
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    /* Record the FS segment override and make it the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    /* Fetch and dispatch the opcode byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2794
2795
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the GS segment override and make it the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    /* Fetch and dispatch the opcode byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2813
2814
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Fetch and dispatch the opcode byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2839
2840
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    /* Toggle the effective address size: 16<->32 in legacy modes, 64->32 in long mode. */
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Fetch and dispatch the opcode byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2866
2867
/**
 * @opcode 0x68
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* The immediate is 32 bits and gets sign-extended to 64. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2911
2912
2913/**
2914 * @opcode 0x69
2915 * @opflclass multiply
2916 */
2917FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2918{
2919 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2920 IEMOP_HLP_MIN_186();
2921 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2922 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2923
2924 switch (pVCpu->iem.s.enmEffOpSize)
2925 {
2926 case IEMMODE_16BIT:
2927 {
2928 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2929 if (IEM_IS_MODRM_REG_MODE(bRm))
2930 {
2931 /* register operand */
2932 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2933 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2935 IEM_MC_LOCAL(uint16_t, u16Tmp);
2936 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2937 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2938 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
2939 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2940 IEM_MC_REF_EFLAGS(pEFlags);
2941 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2942 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2943
2944 IEM_MC_ADVANCE_RIP_AND_FINISH();
2945 IEM_MC_END();
2946 }
2947 else
2948 {
2949 /* memory operand */
2950 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2951 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2953
2954 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2956
2957 IEM_MC_LOCAL(uint16_t, u16Tmp);
2958 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2959
2960 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2961 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
2962 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2963 IEM_MC_REF_EFLAGS(pEFlags);
2964 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2965 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2966
2967 IEM_MC_ADVANCE_RIP_AND_FINISH();
2968 IEM_MC_END();
2969 }
2970 break;
2971 }
2972
2973 case IEMMODE_32BIT:
2974 {
2975 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
2976 if (IEM_IS_MODRM_REG_MODE(bRm))
2977 {
2978 /* register operand */
2979 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2980 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
2981 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2982 IEM_MC_LOCAL(uint32_t, u32Tmp);
2983 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2984
2985 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
2986 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
2987 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2988 IEM_MC_REF_EFLAGS(pEFlags);
2989 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
2990 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
2991
2992 IEM_MC_ADVANCE_RIP_AND_FINISH();
2993 IEM_MC_END();
2994 }
2995 else
2996 {
2997 /* memory operand */
2998 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
2999 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3000 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3001
3002 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3004
3005 IEM_MC_LOCAL(uint32_t, u32Tmp);
3006 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3007
3008 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3009 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3010 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3011 IEM_MC_REF_EFLAGS(pEFlags);
3012 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3013 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3014
3015 IEM_MC_ADVANCE_RIP_AND_FINISH();
3016 IEM_MC_END();
3017 }
3018 break;
3019 }
3020
3021 case IEMMODE_64BIT:
3022 {
3023 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3024 if (IEM_IS_MODRM_REG_MODE(bRm))
3025 {
3026 /* register operand */
3027 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3028 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3030 IEM_MC_LOCAL(uint64_t, u64Tmp);
3031 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3032
3033 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3034 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
3035 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3036 IEM_MC_REF_EFLAGS(pEFlags);
3037 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3038 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3039
3040 IEM_MC_ADVANCE_RIP_AND_FINISH();
3041 IEM_MC_END();
3042 }
3043 else
3044 {
3045 /* memory operand */
3046 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3047 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3048 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3049
3050 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3051 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3052
3053 IEM_MC_LOCAL(uint64_t, u64Tmp);
3054 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3055
3056 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3057 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3058 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3059 IEM_MC_REF_EFLAGS(pEFlags);
3060 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3061 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3062
3063 IEM_MC_ADVANCE_RIP_AND_FINISH();
3064 IEM_MC_END();
3065 }
3066 break;
3067 }
3068
3069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3070 }
3071}
3072
3073
3074/**
 3075 * @opcode 0x6a
 * PUSH Ib - push a byte immediate, sign-extended to the effective operand
 * size (16/32/64-bit), onto the stack. Min CPU: 80186.
 3076 */
 3077FNIEMOP_DEF(iemOp_push_Ib)
 3078{
 3079 IEMOP_MNEMONIC(push_Ib, "push Ib");
 3080 IEMOP_HLP_MIN_186();
 3081 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3082 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
 3083
 /* The signed byte is widened to the operand size via the casts below. */
 3084 switch (pVCpu->iem.s.enmEffOpSize)
 3085 {
 3086 case IEMMODE_16BIT:
 3087 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
 3088 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3089 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
 3090 IEM_MC_PUSH_U16(uValue);
 3091 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3092 IEM_MC_END();
 3093 break;
 3094 case IEMMODE_32BIT:
 3095 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
 3096 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3097 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
 3098 IEM_MC_PUSH_U32(uValue);
 3099 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3100 IEM_MC_END();
 3101 break;
 3102 case IEMMODE_64BIT:
 3103 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
 3104 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3105 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
 3106 IEM_MC_PUSH_U64(uValue);
 3107 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3108 IEM_MC_END();
 3109 break;
 3110 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3111 }
 3112}
3113
3114
3115/**
 3116 * @opcode 0x6b
 3117 * @opflclass multiply
 * IMUL Gv,Ev,Ib - three-operand signed multiply with a sign-extended byte
 * immediate; the truncated result goes into the ModR/M 'reg' register.
 * An EFLAGS-behavior-specific worker is selected per operand size (SF/ZF/AF/PF
 * are architecturally undefined here, see the verification hint below).
 3118 */
 3119FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
 3120{
 3121 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib (sign-extended); */
 3122 IEMOP_HLP_MIN_186();
 3123 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
 3124 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
 3125
 3126 switch (pVCpu->iem.s.enmEffOpSize)
 3127 {
 3128 case IEMMODE_16BIT:
 3129 {
 3130 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
 3131 if (IEM_IS_MODRM_REG_MODE(bRm))
 3132 {
 3133 /* register operand */
 3134 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
 3135 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
 3136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3137
 3138 IEM_MC_LOCAL(uint16_t, u16Tmp);
 3139 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
 3140
 /* The worker multiplies in place via pu16Dst; the result is then copied to the 'reg' register. */
 3141 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
 3142 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
 3143 IEM_MC_ARG(uint32_t *, pEFlags, 2);
 3144 IEM_MC_REF_EFLAGS(pEFlags);
 3145 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
 3146 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
 3147
 3148 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3149 IEM_MC_END();
 3150 }
 3151 else
 3152 {
 3153 /* memory operand */
 3154 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
 3155
 /* Note: effective address first, then the immediate (the '1' is the remaining opcode bytes). */
 3156 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 3157 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
 3158
 3159 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
 3160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3161
 3162 IEM_MC_LOCAL(uint16_t, u16Tmp);
 3163 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 3164
 3165 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
 3166 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
 3167 IEM_MC_ARG(uint32_t *, pEFlags, 2);
 3168 IEM_MC_REF_EFLAGS(pEFlags);
 3169 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
 3170 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
 3171
 3172 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3173 IEM_MC_END();
 3174 }
 3175 break;
 3176 }
 3177
 3178 case IEMMODE_32BIT:
 3179 {
 3180 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
 3181 if (IEM_IS_MODRM_REG_MODE(bRm))
 3182 {
 3183 /* register operand */
 3184 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
 3185 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
 3186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3187 IEM_MC_LOCAL(uint32_t, u32Tmp);
 3188 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
 3189
 3190 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
 3191 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
 3192 IEM_MC_ARG(uint32_t *, pEFlags, 2);
 3193 IEM_MC_REF_EFLAGS(pEFlags);
 3194 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
 3195 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
 3196
 3197 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3198 IEM_MC_END();
 3199 }
 3200 else
 3201 {
 3202 /* memory operand */
 3203 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
 3204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 3205 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
 3206
 3207 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
 3208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3209
 3210 IEM_MC_LOCAL(uint32_t, u32Tmp);
 3211 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 3212
 3213 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
 3214 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
 3215 IEM_MC_ARG(uint32_t *, pEFlags, 2);
 3216 IEM_MC_REF_EFLAGS(pEFlags);
 3217 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
 3218 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
 3219
 3220 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3221 IEM_MC_END();
 3222 }
 3223 break;
 3224 }
 3225
 3226 case IEMMODE_64BIT:
 3227 {
 3228 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
 3229 if (IEM_IS_MODRM_REG_MODE(bRm))
 3230 {
 3231 /* register operand */
 3232 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
 3233 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
 3234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3235 IEM_MC_LOCAL(uint64_t, u64Tmp);
 3236 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
 3237
 3238 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
 3239 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
 3240 IEM_MC_ARG(uint32_t *, pEFlags, 2);
 3241 IEM_MC_REF_EFLAGS(pEFlags);
 3242 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
 3243 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
 3244
 3245 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3246 IEM_MC_END();
 3247 }
 3248 else
 3249 {
 3250 /* memory operand */
 3251 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
 3252 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 3253 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
 3254
 3255 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
 3256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3257
 3258 IEM_MC_LOCAL(uint64_t, u64Tmp);
 3259 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 3260
 3261 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
 3262 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
 3263 IEM_MC_ARG(uint32_t *, pEFlags, 2);
 3264 IEM_MC_REF_EFLAGS(pEFlags);
 3265 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
 3266 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
 3267
 3268 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3269 IEM_MC_END();
 3270 }
 3271 break;
 3272 }
 3273
 3274 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3275 }
 3276}
3277
3278
3279/**
 3280 * @opcode 0x6c
 3281 * @opfltest iopl,df
 * INS Yb,DX - input byte(s) from the I/O port in DX to the destination
 * string. All variants are deferred to a C implementation selected by the
 * effective address size; either REP prefix (F2 or F3) selects the repeating
 * variant. The RT_BIT_64 mask presumably tells the native recompiler which
 * guest GPRs the C impl modifies (xDI, plus xCX for the rep forms) -- TODO
 * confirm against the IEM_MC_DEFER_TO_CIMPL_*_RET documentation.
 3282 */
 3283FNIEMOP_DEF(iemOp_insb_Yb_DX)
 3284{
 3285 IEMOP_HLP_MIN_186();
 3286 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3287 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
 3288 {
 3289 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
 3290 switch (pVCpu->iem.s.enmEffAddrMode)
 3291 {
 3292 case IEMMODE_16BIT:
 3293 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3294 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
 3295 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3296 iemCImpl_rep_ins_op8_addr16, false);
 3297 case IEMMODE_32BIT:
 3298 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3299 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
 3300 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3301 iemCImpl_rep_ins_op8_addr32, false);
 3302 case IEMMODE_64BIT:
 3303 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3304 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
 3305 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3306 iemCImpl_rep_ins_op8_addr64, false);
 3307 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3308 }
 3309 }
 3310 else
 3311 {
 3312 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
 3313 switch (pVCpu->iem.s.enmEffAddrMode)
 3314 {
 3315 case IEMMODE_16BIT:
 3316 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3317 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
 3318 iemCImpl_ins_op8_addr16, false);
 3319 case IEMMODE_32BIT:
 3320 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3321 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
 3322 iemCImpl_ins_op8_addr32, false);
 3323 case IEMMODE_64BIT:
 3324 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3325 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
 3326 iemCImpl_ins_op8_addr64, false);
 3327 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3328 }
 3329 }
 3330}
3331
3332
3333/**
 3334 * @opcode 0x6d
 3335 * @opfltest iopl,df
 * INS Yv,DX - input word/dword string from the I/O port in DX; the C impl is
 * chosen by effective operand size x address size, with either REP prefix
 * (F2 or F3) selecting the repeating variant. The 64-bit operand size shares
 * the op32 workers (see the fall-through below).
 3336 */
 3337FNIEMOP_DEF(iemOp_inswd_Yv_DX)
 3338{
 3339 IEMOP_HLP_MIN_186();
 3340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3341 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
 3342 {
 3343 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
 3344 switch (pVCpu->iem.s.enmEffOpSize)
 3345 {
 3346 case IEMMODE_16BIT:
 3347 switch (pVCpu->iem.s.enmEffAddrMode)
 3348 {
 3349 case IEMMODE_16BIT:
 3350 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3351 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
 3352 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3353 iemCImpl_rep_ins_op16_addr16, false);
 3354 case IEMMODE_32BIT:
 3355 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3356 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
 3357 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3358 iemCImpl_rep_ins_op16_addr32, false);
 3359 case IEMMODE_64BIT:
 3360 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3361 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
 3362 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3363 iemCImpl_rep_ins_op16_addr64, false);
 3364 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3365 }
 3366 break;
 /* 64-bit operand size deliberately falls into the 32-bit case (same workers). */
 3367 case IEMMODE_64BIT:
 3368 case IEMMODE_32BIT:
 3369 switch (pVCpu->iem.s.enmEffAddrMode)
 3370 {
 3371 case IEMMODE_16BIT:
 3372 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3373 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
 3374 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3375 iemCImpl_rep_ins_op32_addr16, false);
 3376 case IEMMODE_32BIT:
 3377 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3378 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
 3379 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3380 iemCImpl_rep_ins_op32_addr32, false);
 3381 case IEMMODE_64BIT:
 3382 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3383 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
 3384 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3385 iemCImpl_rep_ins_op32_addr64, false);
 3386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3387 }
 3388 break;
 3389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3390 }
 3391 }
 3392 else
 3393 {
 3394 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
 3395 switch (pVCpu->iem.s.enmEffOpSize)
 3396 {
 3397 case IEMMODE_16BIT:
 3398 switch (pVCpu->iem.s.enmEffAddrMode)
 3399 {
 3400 case IEMMODE_16BIT:
 3401 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3402 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
 3403 iemCImpl_ins_op16_addr16, false);
 3404 case IEMMODE_32BIT:
 3405 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3406 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
 3407 iemCImpl_ins_op16_addr32, false);
 3408 case IEMMODE_64BIT:
 3409 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3410 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
 3411 iemCImpl_ins_op16_addr64, false);
 3412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3413 }
 3414 break;
 /* 64-bit operand size deliberately falls into the 32-bit case (same workers). */
 3415 case IEMMODE_64BIT:
 3416 case IEMMODE_32BIT:
 3417 switch (pVCpu->iem.s.enmEffAddrMode)
 3418 {
 3419 case IEMMODE_16BIT:
 3420 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3421 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
 3422 iemCImpl_ins_op32_addr16, false);
 3423 case IEMMODE_32BIT:
 3424 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3425 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
 3426 iemCImpl_ins_op32_addr32, false);
 3427 case IEMMODE_64BIT:
 3428 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3429 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
 3430 iemCImpl_ins_op32_addr64, false);
 3431 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3432 }
 3433 break;
 3434 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3435 }
 3436 }
 3437}
3438
3439
3440/**
 3441 * @opcode 0x6e
 3442 * @opfltest iopl,df
 * OUTS DX,Yb - output byte(s) from the source string (honouring segment
 * overrides via iEffSeg) to the I/O port in DX. Deferred to C impls selected
 * by address size; either REP prefix (F2 or F3) selects the repeating
 * variant, which additionally touches xCX besides xSI.
 3443 */
 3444FNIEMOP_DEF(iemOp_outsb_Yb_DX)
 3445{
 3446 IEMOP_HLP_MIN_186();
 3447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3448 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
 3449 {
 3450 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
 3451 switch (pVCpu->iem.s.enmEffAddrMode)
 3452 {
 3453 case IEMMODE_16BIT:
 3454 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3455 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
 3456 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3457 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
 3458 case IEMMODE_32BIT:
 3459 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3460 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
 3461 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3462 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
 3463 case IEMMODE_64BIT:
 3464 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3465 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
 3466 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3467 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
 3468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3469 }
 3470 }
 3471 else
 3472 {
 3473 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
 3474 switch (pVCpu->iem.s.enmEffAddrMode)
 3475 {
 3476 case IEMMODE_16BIT:
 3477 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3478 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
 3479 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
 3480 case IEMMODE_32BIT:
 3481 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3482 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
 3483 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
 3484 case IEMMODE_64BIT:
 3485 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3486 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
 3487 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
 3488 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3489 }
 3490 }
 3491}
3492
3493
3494/**
 3495 * @opcode 0x6f
 3496 * @opfltest iopl,df
 * OUTS DX,Yv - output word/dword string to the I/O port in DX. The C impl is
 * chosen by effective operand size x address size; either REP prefix (F2 or
 * F3) selects the repeating variant, and 64-bit operand size shares the op32
 * workers (see the fall-through below).
 3497 */
 3498FNIEMOP_DEF(iemOp_outswd_Yv_DX)
 3499{
 3500 IEMOP_HLP_MIN_186();
 3501 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3502 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
 3503 {
 3504 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
 3505 switch (pVCpu->iem.s.enmEffOpSize)
 3506 {
 3507 case IEMMODE_16BIT:
 3508 switch (pVCpu->iem.s.enmEffAddrMode)
 3509 {
 3510 case IEMMODE_16BIT:
 3511 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3512 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
 3513 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3514 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
 3515 case IEMMODE_32BIT:
 3516 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3517 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
 3518 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3519 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
 3520 case IEMMODE_64BIT:
 3521 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3522 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
 3523 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3524 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
 3525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3526 }
 3527 break;
 /* 64-bit operand size deliberately falls into the 32-bit case (same workers). */
 3528 case IEMMODE_64BIT:
 3529 case IEMMODE_32BIT:
 3530 switch (pVCpu->iem.s.enmEffAddrMode)
 3531 {
 3532 case IEMMODE_16BIT:
 3533 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3534 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
 3535 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3536 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
 3537 case IEMMODE_32BIT:
 3538 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3539 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
 3540 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3541 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
 3542 case IEMMODE_64BIT:
 3543 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3544 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
 3545 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
 3546 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
 3547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3548 }
 3549 break;
 3550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3551 }
 3552 }
 3553 else
 3554 {
 3555 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
 3556 switch (pVCpu->iem.s.enmEffOpSize)
 3557 {
 3558 case IEMMODE_16BIT:
 3559 switch (pVCpu->iem.s.enmEffAddrMode)
 3560 {
 3561 case IEMMODE_16BIT:
 3562 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3563 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
 3564 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
 3565 case IEMMODE_32BIT:
 3566 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3567 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
 3568 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
 3569 case IEMMODE_64BIT:
 3570 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3571 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
 3572 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
 3573 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3574 }
 3575 break;
 /* 64-bit operand size deliberately falls into the 32-bit case (same workers). */
 3576 case IEMMODE_64BIT:
 3577 case IEMMODE_32BIT:
 3578 switch (pVCpu->iem.s.enmEffAddrMode)
 3579 {
 3580 case IEMMODE_16BIT:
 3581 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3582 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
 3583 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
 3584 case IEMMODE_32BIT:
 3585 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3586 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
 3587 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
 3588 case IEMMODE_64BIT:
 3589 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
 3590 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
 3591 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
 3592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3593 }
 3594 break;
 3595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
 3596 }
 3597 }
 3598}
3599
3600
3601/**
 3602 * @opcode 0x70
 3603 * @opfltest of
 * JO rel8 - jump short if the overflow flag (OF) is set; otherwise fall
 * through to the next instruction.
 3604 */
 3605FNIEMOP_DEF(iemOp_jo_Jb)
 3606{
 3607 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
 3608 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3609 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3610
 3611 IEM_MC_BEGIN(0, 0, 0, 0);
 3612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3613 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
 3614 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3615 } IEM_MC_ELSE() {
 3616 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3617 } IEM_MC_ENDIF();
 3618 IEM_MC_END();
 3619}
3620
3621
3622/**
 3623 * @opcode 0x71
 3624 * @opfltest of
 * JNO rel8 - jump short if the overflow flag (OF) is clear. Note the
 * inverted branch arms: the flag-set path falls through.
 3625 */
 3626FNIEMOP_DEF(iemOp_jno_Jb)
 3627{
 3628 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
 3629 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3630 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3631
 3632 IEM_MC_BEGIN(0, 0, 0, 0);
 3633 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3634 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
 3635 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3636 } IEM_MC_ELSE() {
 3637 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3638 } IEM_MC_ENDIF();
 3639 IEM_MC_END();
 3640}
3641
3642/**
 3643 * @opcode 0x72
 3644 * @opfltest cf
 * JC/JB/JNAE rel8 - jump short if the carry flag (CF) is set.
 3645 */
 3646FNIEMOP_DEF(iemOp_jc_Jb)
 3647{
 3648 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
 3649 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3650 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3651
 3652 IEM_MC_BEGIN(0, 0, 0, 0);
 3653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3654 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
 3655 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3656 } IEM_MC_ELSE() {
 3657 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3658 } IEM_MC_ENDIF();
 3659 IEM_MC_END();
 3660}
3661
3662
3663/**
 3664 * @opcode 0x73
 3665 * @opfltest cf
 * JNC/JNB/JAE rel8 - jump short if the carry flag (CF) is clear. Note the
 * inverted branch arms: the flag-set path falls through.
 3666 */
 3667FNIEMOP_DEF(iemOp_jnc_Jb)
 3668{
 3669 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
 3670 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3671 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3672
 3673 IEM_MC_BEGIN(0, 0, 0, 0);
 3674 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3675 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
 3676 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3677 } IEM_MC_ELSE() {
 3678 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3679 } IEM_MC_ENDIF();
 3680 IEM_MC_END();
 3681}
3682
3683
3684/**
 3685 * @opcode 0x74
 3686 * @opfltest zf
 * JE/JZ rel8 - jump short if the zero flag (ZF) is set.
 3687 */
 3688FNIEMOP_DEF(iemOp_je_Jb)
 3689{
 3690 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
 3691 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3692 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3693
 3694 IEM_MC_BEGIN(0, 0, 0, 0);
 3695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3696 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
 3697 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3698 } IEM_MC_ELSE() {
 3699 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3700 } IEM_MC_ENDIF();
 3701 IEM_MC_END();
 3702}
3703
3704
3705/**
 3706 * @opcode 0x75
 3707 * @opfltest zf
 * JNE/JNZ rel8 - jump short if the zero flag (ZF) is clear. Note the
 * inverted branch arms: the flag-set path falls through.
 3708 */
 3709FNIEMOP_DEF(iemOp_jne_Jb)
 3710{
 3711 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
 3712 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3713 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3714
 3715 IEM_MC_BEGIN(0, 0, 0, 0);
 3716 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3717 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
 3718 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3719 } IEM_MC_ELSE() {
 3720 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3721 } IEM_MC_ENDIF();
 3722 IEM_MC_END();
 3723}
3724
3725
3726/**
 3727 * @opcode 0x76
 3728 * @opfltest cf,zf
 * JBE/JNA rel8 - jump short if CF or ZF is set (unsigned below-or-equal).
 3729 */
 3730FNIEMOP_DEF(iemOp_jbe_Jb)
 3731{
 3732 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
 3733 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3734 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3735
 3736 IEM_MC_BEGIN(0, 0, 0, 0);
 3737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3738 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
 3739 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3740 } IEM_MC_ELSE() {
 3741 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3742 } IEM_MC_ENDIF();
 3743 IEM_MC_END();
 3744}
3745
3746
3747/**
 3748 * @opcode 0x77
 3749 * @opfltest cf,zf
 * JA/JNBE rel8 - jump short if both CF and ZF are clear (unsigned above).
 * Note the inverted branch arms: the any-flag-set path falls through.
 3750 */
 3751FNIEMOP_DEF(iemOp_jnbe_Jb)
 3752{
 3753 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
 3754 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3755 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3756
 3757 IEM_MC_BEGIN(0, 0, 0, 0);
 3758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3759 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
 3760 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3761 } IEM_MC_ELSE() {
 3762 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3763 } IEM_MC_ENDIF();
 3764 IEM_MC_END();
 3765}
3766
3767
3768/**
 3769 * @opcode 0x78
 3770 * @opfltest sf
 * JS rel8 - jump short if the sign flag (SF) is set.
 3771 */
 3772FNIEMOP_DEF(iemOp_js_Jb)
 3773{
 3774 IEMOP_MNEMONIC(js_Jb, "js Jb");
 3775 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3776 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3777
 3778 IEM_MC_BEGIN(0, 0, 0, 0);
 3779 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3780 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
 3781 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3782 } IEM_MC_ELSE() {
 3783 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3784 } IEM_MC_ENDIF();
 3785 IEM_MC_END();
 3786}
3787
3788
3789/**
 3790 * @opcode 0x79
 3791 * @opfltest sf
 * JNS rel8 - jump short if the sign flag (SF) is clear. Note the inverted
 * branch arms: the flag-set path falls through.
 3792 */
 3793FNIEMOP_DEF(iemOp_jns_Jb)
 3794{
 3795 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
 3796 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3797 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3798
 3799 IEM_MC_BEGIN(0, 0, 0, 0);
 3800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3801 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
 3802 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3803 } IEM_MC_ELSE() {
 3804 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3805 } IEM_MC_ENDIF();
 3806 IEM_MC_END();
 3807}
3808
3809
3810/**
 3811 * @opcode 0x7a
 3812 * @opfltest pf
 * JP/JPE rel8 - jump short if the parity flag (PF) is set.
 3813 */
 3814FNIEMOP_DEF(iemOp_jp_Jb)
 3815{
 3816 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
 3817 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3818 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3819
 3820 IEM_MC_BEGIN(0, 0, 0, 0);
 3821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3822 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
 3823 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3824 } IEM_MC_ELSE() {
 3825 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3826 } IEM_MC_ENDIF();
 3827 IEM_MC_END();
 3828}
3829
3830
3831/**
 3832 * @opcode 0x7b
 3833 * @opfltest pf
 * JNP/JPO rel8 - jump short if the parity flag (PF) is clear. Note the
 * inverted branch arms: the flag-set path falls through.
 3834 */
 3835FNIEMOP_DEF(iemOp_jnp_Jb)
 3836{
 3837 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
 3838 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3839 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3840
 3841 IEM_MC_BEGIN(0, 0, 0, 0);
 3842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3843 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
 3844 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3845 } IEM_MC_ELSE() {
 3846 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3847 } IEM_MC_ENDIF();
 3848 IEM_MC_END();
 3849}
3850
3851
3852/**
 3853 * @opcode 0x7c
 3854 * @opfltest sf,of
 * JL/JNGE rel8 - jump short if SF != OF (signed less-than).
 3855 */
 3856FNIEMOP_DEF(iemOp_jl_Jb)
 3857{
 3858 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
 3859 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3860 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3861
 3862 IEM_MC_BEGIN(0, 0, 0, 0);
 3863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3864 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
 3865 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3866 } IEM_MC_ELSE() {
 3867 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3868 } IEM_MC_ENDIF();
 3869 IEM_MC_END();
 3870}
3871
3872
3873/**
 3874 * @opcode 0x7d
 3875 * @opfltest sf,of
 * JGE/JNL rel8 - jump short if SF == OF (signed greater-or-equal). Note the
 * inverted branch arms: the SF != OF path falls through.
 3876 */
 3877FNIEMOP_DEF(iemOp_jnl_Jb)
 3878{
 3879 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
 3880 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3881 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3882
 3883 IEM_MC_BEGIN(0, 0, 0, 0);
 3884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3885 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
 3886 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3887 } IEM_MC_ELSE() {
 3888 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3889 } IEM_MC_ENDIF();
 3890 IEM_MC_END();
 3891}
3892
3893
3894/**
 3895 * @opcode 0x7e
 3896 * @opfltest zf,sf,of
 * JLE/JNG rel8 - jump short if ZF is set or SF != OF (signed
 * less-than-or-equal).
 3897 */
 3898FNIEMOP_DEF(iemOp_jle_Jb)
 3899{
 3900 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
 3901 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3902 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3903
 3904 IEM_MC_BEGIN(0, 0, 0, 0);
 3905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3906 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
 3907 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3908 } IEM_MC_ELSE() {
 3909 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3910 } IEM_MC_ENDIF();
 3911 IEM_MC_END();
 3912}
3913
3914
3915/**
 3916 * @opcode 0x7f
 3917 * @opfltest zf,sf,of
 * JG/JNLE rel8 - jump short if ZF is clear and SF == OF (signed
 * greater-than). Note the inverted branch arms: the taken-condition of the
 * DSL test is the *fall-through* path here.
 3918 */
 3919FNIEMOP_DEF(iemOp_jnle_Jb)
 3920{
 3921 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
 3922 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
 3923 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
 3924
 3925 IEM_MC_BEGIN(0, 0, 0, 0);
 3926 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
 3927 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
 3928 IEM_MC_ADVANCE_RIP_AND_FINISH();
 3929 } IEM_MC_ELSE() {
 3930 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
 3931 } IEM_MC_ENDIF();
 3932 IEM_MC_END();
 3933}
3934
3935
3936/**
 3937 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 3938 * iemOp_Grp1_Eb_Ib_80.
 *
 * Emits the register form and the non-LOCK memory form (read-modify-write
 * mapping of the Eb operand). @note This macro deliberately ends inside an
 * open 'else { (void)0' - it MUST be followed by
 * IEMOP_BODY_BINARY_Eb_Ib_LOCKED, which supplies the LOCK-prefixed path and
 * closes the braces.
 *
 * @param a_fnNormalU8 The non-atomic u8 worker taking (pu8Dst, u8Src,
 * pEFlags); called for both register and unlocked
 * memory targets.
 3939 */
 3940#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
 3941 if (IEM_IS_MODRM_REG_MODE(bRm)) \
 3942 { \
 3943 /* register target */ \
 3944 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
 3945 IEM_MC_BEGIN(3, 0, 0, 0); \
 3946 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
 3947 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
 3948 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
 3949 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
 3950 \
 3951 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
 3952 IEM_MC_REF_EFLAGS(pEFlags); \
 3953 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
 3954 \
 3955 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
 3956 IEM_MC_END(); \
 3957 } \
 3958 else \
 3959 { \
 3960 /* memory target */ \
 3961 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
 3962 { \
 3963 IEM_MC_BEGIN(3, 3, 0, 0); \
 3964 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
 3965 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
 3966 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
 3967 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
 3968 \
 3969 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
 3970 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
 3971 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
 3972 IEMOP_HLP_DONE_DECODING(); \
 3973 \
 3974 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
 3975 IEM_MC_FETCH_EFLAGS(EFlags); \
 3976 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
 3977 \
 3978 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
 3979 IEM_MC_COMMIT_EFLAGS(EFlags); \
 3980 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
 3981 IEM_MC_END(); \
 3982 } \
 3983 else \
 3984 { \
 3985 (void)0
3986
/**
 * LOCK-prefixed memory-target tail for IEMOP_BODY_BINARY_Eb_Ib_RW; must
 * directly follow that macro as it closes the braces the former left open.
 * (Separate macro matching the pattern of the Ev,Iz variants below, which
 * note IEMAllInstPython.py parsing restrictions as the reason.)
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4010
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW, for CMP which does not
 * write the destination.  Ends with open braces; must be followed by
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK (LOCK is invalid with a read-only
 * destination).
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,                 0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,        2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4057
/**
 * Tail for IEMOP_BODY_BINARY_Eb_Ib_RO: the destination is never written, so
 * a LOCK prefix is rejected with an invalid-opcode style exception.  Closes
 * the braces the RO macro left open.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4064
4065
4066
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /0
 * @opflclass arithmetic
 *
 * ADD Eb,Ib - byte add with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_add_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
}
4078
4079
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /1
 * @opflclass logical
 *
 * OR Eb,Ib - byte bitwise OR with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_or_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
}
4091
4092
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 *
 * ADC Eb,Ib - byte add-with-carry with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
}
4104
4105
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 *
 * SBB Eb,Ib - byte subtract-with-borrow with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
}
4117
4118
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /4
 * @opflclass logical
 *
 * AND Eb,Ib - byte bitwise AND with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
}
4130
4131
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /5
 * @opflclass arithmetic
 *
 * SUB Eb,Ib - byte subtract with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
}
4143
4144
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /6
 * @opflclass logical
 *
 * XOR Eb,Ib - byte bitwise XOR with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
}
4156
4157
/**
 * @opmaps grp1_80,grp1_83
 * @opcode /7
 * @opflclass arithmetic
 *
 * CMP Eb,Ib - byte compare with immediate.  Destination is read-only, so a
 * LOCK prefix is rejected (NO_LOCK tail) instead of dispatching a locked
 * implementation.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
    IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
}
4169
4170
/**
 * @opcode 0x80
 *
 * Group 1 Eb,Ib: dispatches on the ModR/M reg field
 * (/0=add /1=or /2=adc /3=sbb /4=and /5=sub /6=xor /7=cmp).
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4190
4191
/**
 * Body for a group 1 binary operator.
 *
 * Ev,Iz read-modify-write variant (opcode 0x81): immediate is imm16/imm32
 * per operand size, with imm32 sign-extended to 64 bits in 64-bit mode.
 * Covers the register destination and the un-LOCKed memory destination;
 * ends with open braces that IEMOP_BODY_BINARY_Ev_Iz_LOCKED must close.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * LOCK-prefixed memory-target tail for IEMOP_BODY_BINARY_Ev_Iz_RW; must
 * directly follow that macro as it closes the braces left open there.
 * Uses atomic mapping/commit for the destination.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4432
/* read-only version */
/**
 * Ev,Iz read-only variant (CMP): destination is only read, memory is mapped
 * RO, and a LOCK prefix is rejected inline (no separate tail macro needed;
 * this macro is self-contained and closes all its braces).
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,   1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4589
4590
/**
 * @opmaps grp1_81
 * @opcode /0
 * @opflclass arithmetic
 *
 * ADD Ev,Iz - word/dword/qword add with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
4602
4603
/**
 * @opmaps grp1_81
 * @opcode /1
 * @opflclass logical
 *
 * OR Ev,Iz - word/dword/qword bitwise OR with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
4615
4616
/**
 * @opmaps grp1_81
 * @opcode /2
 * @opflclass arithmetic_carry
 *
 * ADC Ev,Iz - word/dword/qword add-with-carry with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
4628
4629
/**
 * @opmaps grp1_81
 * @opcode /3
 * @opflclass arithmetic_carry
 *
 * SBB Ev,Iz - word/dword/qword subtract-with-borrow with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
4641
4642
/**
 * @opmaps grp1_81
 * @opcode /4
 * @opflclass logical
 *
 * AND Ev,Iz - word/dword/qword bitwise AND with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
4654
4655
/**
 * @opmaps grp1_81
 * @opcode /5
 * @opflclass arithmetic
 *
 * SUB Ev,Iz - word/dword/qword subtract with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
4667
4668
/**
 * @opmaps grp1_81
 * @opcode /6
 * @opflclass logical
 *
 * XOR Ev,Iz - word/dword/qword bitwise XOR with immediate; LOCK-able when the target is memory.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
4680
4681
/**
 * @opmaps grp1_81
 * @opcode /7
 * @opflclass arithmetic
 *
 * CMP Ev,Iz - compare with immediate.  The RO body is self-contained and
 * rejects LOCK inline, so no tail macro is needed here.
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
    IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
4692
4693
/**
 * @opcode 0x81
 *
 * Group 1 Ev,Iz: dispatches on the ModR/M reg field
 * (/0=add /1=or /2=adc /3=sbb /4=and /5=sub /6=xor /7=cmp).
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4713
4714
/**
 * @opcode 0x82
 * @opmnemonic grp1_82
 * @opgroup og_groups
 *
 * Alias of opcode 0x80 that is only valid outside 64-bit mode (raises \#UD
 * in long mode); forwards to the 0x80 handler after the mode check.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
4725
4726
/**
 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
 * iemOp_Grp1_Ev_Ib.
 *
 * Opcode 0x83: the imm8 is sign-extended to the effective operand size
 * (16/32/64 bits) before being passed to the worker.  Covers register and
 * un-LOCKed memory destinations; ends with open braces that
 * IEMOP_BODY_BINARY_Ev_Ib_LOCKED must close.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,    0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,    0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,    0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,    2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * LOCK-prefixed memory-target tail for IEMOP_BODY_BINARY_Ev_Ib_RW; must
 * directly follow that macro as it closes the braces left open there.
 * Uses atomic mapping/commit; the imm8 is sign-extended like in the RW body.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(   pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4951
/*
 * Read-only variant of the grp1 Ev,Ib instruction body, used by CMP (/7)
 * which only reads the r/m operand and updates EFLAGS.  The memory operand
 * is therefore mapped read-only and a LOCK prefix on the memory form raises
 * an invalid-lock-prefix fault (final else branch below).  The 8-bit
 * immediate is sign-extended to the effective operand size before the
 * arithmetic helper is invoked.  Assumes the invoking function has already
 * fetched the ModR/M byte into bRm.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
                \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (uint16_t)(int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (uint32_t)(int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (uint64_t)(int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                    \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5098
/**
 * @opmaps grp1_83
 * @opcode /0
 * @opflclass arithmetic
 *
 * ADD Ev,Ib - add a sign-extended 8-bit immediate to a word/dword/qword r/m.
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* NOTE(review): the RW body appears to handle register and un-locked memory
       targets, deferring LOCK-prefixed memory targets to the LOCKED body invoked
       next (atomic mapping + *_locked helpers) -- confirm against the macro
       definitions earlier in this file. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5110
5111
/**
 * @opmaps grp1_83
 * @opcode /1
 * @opflclass logical
 *
 * OR Ev,Ib - bitwise OR of a sign-extended 8-bit immediate into r/m.
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* Non-locked body first, then the locked body for LOCK-prefixed memory
       targets (see the macro definitions earlier in this file). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5123
5124
/**
 * @opmaps grp1_83
 * @opcode /2
 * @opflclass arithmetic_carry
 *
 * ADC Ev,Ib - add with carry of a sign-extended 8-bit immediate into r/m.
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* Non-locked body first, then the locked body for LOCK-prefixed memory
       targets (see the macro definitions earlier in this file). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5136
5137
/**
 * @opmaps grp1_83
 * @opcode /3
 * @opflclass arithmetic_carry
 *
 * SBB Ev,Ib - subtract with borrow of a sign-extended 8-bit immediate from r/m.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* Non-locked body first, then the locked body for LOCK-prefixed memory
       targets (see the macro definitions earlier in this file). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5149
5150
/**
 * @opmaps grp1_83
 * @opcode /4
 * @opflclass logical
 *
 * AND Ev,Ib - bitwise AND of a sign-extended 8-bit immediate into r/m.
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* Non-locked body first, then the locked body for LOCK-prefixed memory
       targets (see the macro definitions earlier in this file). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5162
5163
/**
 * @opmaps grp1_83
 * @opcode /5
 * @opflclass arithmetic
 *
 * SUB Ev,Ib - subtract a sign-extended 8-bit immediate from r/m.
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* Non-locked body first, then the locked body for LOCK-prefixed memory
       targets (see the macro definitions earlier in this file). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5175
5176
/**
 * @opmaps grp1_83
 * @opcode /6
 * @opflclass logical
 *
 * XOR Ev,Ib - bitwise XOR of a sign-extended 8-bit immediate into r/m.
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* Non-locked body first, then the locked body for LOCK-prefixed memory
       targets (see the macro definitions earlier in this file). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5188
5189
/**
 * @opmaps grp1_83
 * @opcode /7
 * @opflclass arithmetic
 *
 * CMP Ev,Ib - compare r/m with a sign-extended 8-bit immediate.  Only reads
 * the destination and updates EFLAGS, hence the read-only body (which also
 * rejects a LOCK prefix with an invalid-lock-prefix fault).
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5200
5201
/**
 * @opcode 0x83
 *
 * Group 1 dispatcher for opcode 0x83 (Ev,Ib): the /reg field of the ModR/M
 * byte selects one of eight arithmetic/logical operations, all taking a
 * sign-extended 8-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* /reg is only 3 bits wide */
    }
}
5224
5225
/**
 * @opcode 0x84
 * @opflclass logical
 *
 * TEST Eb,Gb - ANDs the two byte operands, discards the result and only
 * updates EFLAGS; hence the read-only r/m body.  AF is left undefined by
 * the instruction, which the verifier is told to ignore.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
}
5236
5237
/**
 * @opcode 0x85
 * @opflclass logical
 *
 * TEST Ev,Gv - ANDs the two operands, discards the result and only updates
 * EFLAGS; hence the read-only r/m body.  AF is left undefined by the
 * instruction, which the verifier is told to ignore.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5248
5249
5250/**
5251 * @opcode 0x86
5252 */
5253FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
5254{
5255 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5256 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
5257
5258 /*
5259 * If rm is denoting a register, no more instruction bytes.
5260 */
5261 if (IEM_IS_MODRM_REG_MODE(bRm))
5262 {
5263 IEM_MC_BEGIN(0, 2, 0, 0);
5264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5265 IEM_MC_LOCAL(uint8_t, uTmp1);
5266 IEM_MC_LOCAL(uint8_t, uTmp2);
5267
5268 IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5269 IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5270 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5271 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5272
5273 IEM_MC_ADVANCE_RIP_AND_FINISH();
5274 IEM_MC_END();
5275 }
5276 else
5277 {
5278 /*
5279 * We're accessing memory.
5280 */
5281#define IEMOP_XCHG_BYTE(a_fnWorker, a_Style) \
5282 IEM_MC_BEGIN(2, 4, 0, 0); \
5283 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5284 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5285 IEM_MC_LOCAL(uint8_t, uTmpReg); \
5286 IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
5287 IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
5288 \
5289 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5290 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5291 IEM_MC_MEM_MAP_U8_##a_Style(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5292 IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5293 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
5294 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Style(bUnmapInfo); \
5295 IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5296 \
5297 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5298 IEM_MC_END()
5299
5300 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5301 {
5302 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked,ATOMIC);
5303 }
5304 else
5305 {
5306 IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked,RW);
5307 }
5308 }
5309}
5310
5311
5312/**
5313 * @opcode 0x87
5314 */
5315FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
5316{
5317 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
5318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5319
5320 /*
5321 * If rm is denoting a register, no more instruction bytes.
5322 */
5323 if (IEM_IS_MODRM_REG_MODE(bRm))
5324 {
5325 switch (pVCpu->iem.s.enmEffOpSize)
5326 {
5327 case IEMMODE_16BIT:
5328 IEM_MC_BEGIN(0, 2, 0, 0);
5329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5330 IEM_MC_LOCAL(uint16_t, uTmp1);
5331 IEM_MC_LOCAL(uint16_t, uTmp2);
5332
5333 IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5334 IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5335 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5336 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5337
5338 IEM_MC_ADVANCE_RIP_AND_FINISH();
5339 IEM_MC_END();
5340 break;
5341
5342 case IEMMODE_32BIT:
5343 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
5344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5345 IEM_MC_LOCAL(uint32_t, uTmp1);
5346 IEM_MC_LOCAL(uint32_t, uTmp2);
5347
5348 IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5349 IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5350 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5351 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5352
5353 IEM_MC_ADVANCE_RIP_AND_FINISH();
5354 IEM_MC_END();
5355 break;
5356
5357 case IEMMODE_64BIT:
5358 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
5359 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5360 IEM_MC_LOCAL(uint64_t, uTmp1);
5361 IEM_MC_LOCAL(uint64_t, uTmp2);
5362
5363 IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
5364 IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
5365 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
5366 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);
5367
5368 IEM_MC_ADVANCE_RIP_AND_FINISH();
5369 IEM_MC_END();
5370 break;
5371
5372 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5373 }
5374 }
5375 else
5376 {
5377 /*
5378 * We're accessing memory.
5379 */
5380#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64, a_Type) \
5381 do { \
5382 switch (pVCpu->iem.s.enmEffOpSize) \
5383 { \
5384 case IEMMODE_16BIT: \
5385 IEM_MC_BEGIN(2, 4, 0, 0); \
5386 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5387 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5388 IEM_MC_LOCAL(uint16_t, uTmpReg); \
5389 IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
5390 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
5391 \
5392 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5393 IEMOP_HLP_DONE_DECODING(); /** @todo testcase: lock xchg */ \
5394 IEM_MC_MEM_MAP_U16_##a_Type(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5395 IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5396 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
5397 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5398 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5399 \
5400 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5401 IEM_MC_END(); \
5402 break; \
5403 \
5404 case IEMMODE_32BIT: \
5405 IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
5406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5407 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5408 IEM_MC_LOCAL(uint32_t, uTmpReg); \
5409 IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
5410 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
5411 \
5412 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5413 IEMOP_HLP_DONE_DECODING(); \
5414 IEM_MC_MEM_MAP_U32_##a_Type(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5415 IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5416 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
5417 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5418 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5419 \
5420 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5421 IEM_MC_END(); \
5422 break; \
5423 \
5424 case IEMMODE_64BIT: \
5425 IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
5426 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5427 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
5428 IEM_MC_LOCAL(uint64_t, uTmpReg); \
5429 IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
5430 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
5431 \
5432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5433 IEMOP_HLP_DONE_DECODING(); \
5434 IEM_MC_MEM_MAP_U64_##a_Type(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5435 IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
5436 IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
5437 IEM_MC_MEM_COMMIT_AND_UNMAP_##a_Type(bUnmapInfo); \
5438 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
5439 \
5440 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
5441 IEM_MC_END(); \
5442 break; \
5443 \
5444 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
5445 } \
5446 } while (0)
5447 if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
5448 {
5449 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked,ATOMIC);
5450 }
5451 else
5452 {
5453 IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked,RW);
5454 }
5455 }
5456}
5457
5458
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb - store the byte register named by ModR/M.reg into the byte
 * register or memory location named by ModR/M.rm.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5498
5499
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv - store the word/dword/qword register named by ModR/M.reg into
 * the register or memory location named by ModR/M.rm, sized by the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5596
5597
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb - load the byte register named by ModR/M.reg from the byte
 * register or memory location named by ModR/M.rm.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5636
5637
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev - load the word/dword/qword register named by ModR/M.reg from
 * the register or memory location named by ModR/M.rm, sized by the
 * effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5734
5735
5736/**
5737 * opcode 0x63
5738 * @todo Table fixme
5739 */
5740FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5741{
5742 if (!IEM_IS_64BIT_CODE(pVCpu))
5743 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5744 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5745 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5746 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5747}
5748
5749
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw - store a segment selector register into a general register or
 * memory.  The memory form is always word sized; the register form obeys
 * the operand size, zero-extending into 32/64-bit registers.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extends the selector */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extends the selector */
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5828
5829
5830
5831
/**
 * @opcode 0x8d
 *
 * LEA Gv,M - store the effective address of the memory operand in the
 * register named by ModR/M.reg; no memory access is performed.  The
 * register form has no encoding and raises \#UD.  For 16/32-bit operand
 * sizes the computed address is truncated to the operand size.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /** @todo optimize: This value casting/masking can be skipped if addr-size ==
             *        operand-size, which is usually the case. It'll save an instruction
             *        and a register. */
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc); /* no truncation needed */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5887
5888
5889/**
5890 * @opcode 0x8e
5891 */
5892FNIEMOP_DEF(iemOp_mov_Sw_Ev)
5893{
5894 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
5895
5896 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5897
5898 /*
5899 * The practical operand size is 16-bit.
5900 */
5901#if 0 /* not necessary */
5902 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
5903#endif
5904
5905 /*
5906 * Check that the destination register exists and can be used with this
5907 * instruction. The REX.R prefix is ignored.
5908 */
5909 uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
5910 /** @todo r=bird: What does 8086 do here wrt CS? */
5911 if ( iSegReg == X86_SREG_CS
5912 || iSegReg > X86_SREG_GS)
5913 IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
5914
5915 /*
5916 * If rm is denoting a register, no more instruction bytes.
5917 *
5918 * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
5919 * IEM_CIMPL_F_XXX values depending on the CPU mode and target
5920 * register. This is a restriction of the current recompiler
5921 * approach.
5922 */
5923 if (IEM_IS_MODRM_REG_MODE(bRm))
5924 {
5925#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
5926 IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
5927 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5928 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5929 IEM_MC_ARG(uint16_t, u16Value, 1); \
5930 IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
5931 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
5932 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
5933 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
5934 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
5935 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
5936 iemCImpl_load_SReg, iSRegArg, u16Value); \
5937 IEM_MC_END()
5938
5939 if (iSegReg == X86_SREG_SS)
5940 {
5941 if (IEM_IS_32BIT_CODE(pVCpu))
5942 {
5943 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5944 }
5945 else
5946 {
5947 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5948 }
5949 }
5950 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5951 {
5952 IEMOP_MOV_SW_EV_REG_BODY(0);
5953 }
5954 else
5955 {
5956 IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
5957 }
5958#undef IEMOP_MOV_SW_EV_REG_BODY
5959 }
5960 else
5961 {
5962 /*
5963 * We're loading the register from memory. The access is word sized
5964 * regardless of operand size prefixes.
5965 */
5966#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
5967 IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
5968 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
5969 IEM_MC_ARG(uint16_t, u16Value, 1); \
5970 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
5971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
5972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
5973 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
5974 IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
5975 RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
5976 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
5977 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg) \
5978 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + iSegReg), \
5979 iemCImpl_load_SReg, iSRegArg, u16Value); \
5980 IEM_MC_END()
5981
5982 if (iSegReg == X86_SREG_SS)
5983 {
5984 if (IEM_IS_32BIT_CODE(pVCpu))
5985 {
5986 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
5987 }
5988 else
5989 {
5990 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
5991 }
5992 }
5993 else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
5994 {
5995 IEMOP_MOV_SW_EV_MEM_BODY(0);
5996 }
5997 else
5998 {
5999 IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
6000 }
6001#undef IEMOP_MOV_SW_EV_MEM_BODY
6002 }
6003}
6004
6005
/** Opcode 0x8f /0.
 *
 * POP Ev - pop a word/dword/qword off the stack into a register or memory
 * operand.  Only reached via iemOp_Grp1A__xop when modrm.reg == 0.  The
 * memory form needs rSP incremented *before* the effective address is
 * calculated, which is handled by passing the pop size in bits 8+ of the
 * cbImmAndRspOffset argument to IEM_MC_CALC_RM_EFF_ADDR (the N << 8 below).
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            /* 2 << 8: tell the EA calc to pretend rSP was already bumped by 2. */
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6137
6138
6139/**
6140 * @opcode 0x8f
6141 */
6142FNIEMOP_DEF(iemOp_Grp1A__xop)
6143{
6144 /*
6145 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
6146 * three byte VEX prefix, except that the mmmmm field cannot have the values
6147 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
6148 */
6149 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6150 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
6151 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
6152
6153 IEMOP_MNEMONIC(xop, "xop");
6154 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
6155 {
6156 /** @todo Test when exctly the XOP conformance checks kick in during
6157 * instruction decoding and fetching (using \#PF). */
6158 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
6159 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6160 if ( ( pVCpu->iem.s.fPrefixes
6161 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
6162 == 0)
6163 {
6164 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
6165 if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
6166 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6167 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6168 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6169 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6170 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
6171 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
6172 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
6173
6174 /** @todo XOP: Just use new tables and decoders. */
6175 switch (bRm & 0x1f)
6176 {
6177 case 8: /* xop opcode map 8. */
6178 IEMOP_BITCH_ABOUT_STUB();
6179 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6180
6181 case 9: /* xop opcode map 9. */
6182 IEMOP_BITCH_ABOUT_STUB();
6183 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6184
6185 case 10: /* xop opcode map 10. */
6186 IEMOP_BITCH_ABOUT_STUB();
6187 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6188
6189 default:
6190 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6191 IEMOP_RAISE_INVALID_OPCODE_RET();
6192 }
6193 }
6194 else
6195 Log(("XOP: Invalid prefix mix!\n"));
6196 }
6197 else
6198 Log(("XOP: XOP support disabled!\n"));
6199 IEMOP_RAISE_INVALID_OPCODE_RET();
6200}
6201
6202
6203/**
6204 * Common 'xchg reg,rAX' helper.
6205 */
6206FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
6207{
6208 iReg |= pVCpu->iem.s.uRexB;
6209 switch (pVCpu->iem.s.enmEffOpSize)
6210 {
6211 case IEMMODE_16BIT:
6212 IEM_MC_BEGIN(0, 2, 0, 0);
6213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6214 IEM_MC_LOCAL(uint16_t, u16Tmp1);
6215 IEM_MC_LOCAL(uint16_t, u16Tmp2);
6216 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
6217 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
6218 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
6219 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
6220 IEM_MC_ADVANCE_RIP_AND_FINISH();
6221 IEM_MC_END();
6222 break;
6223
6224 case IEMMODE_32BIT:
6225 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6227 IEM_MC_LOCAL(uint32_t, u32Tmp1);
6228 IEM_MC_LOCAL(uint32_t, u32Tmp2);
6229 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
6230 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
6231 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
6232 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
6233 IEM_MC_ADVANCE_RIP_AND_FINISH();
6234 IEM_MC_END();
6235 break;
6236
6237 case IEMMODE_64BIT:
6238 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6239 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6240 IEM_MC_LOCAL(uint64_t, u64Tmp1);
6241 IEM_MC_LOCAL(uint64_t, u64Tmp2);
6242 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
6243 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
6244 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
6245 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
6246 IEM_MC_ADVANCE_RIP_AND_FINISH();
6247 IEM_MC_END();
6248 break;
6249
6250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6251 }
6252}
6253
6254
6255/**
6256 * @opcode 0x90
6257 */
6258FNIEMOP_DEF(iemOp_nop)
6259{
6260 /* R8/R8D and RAX/EAX can be exchanged. */
6261 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6262 {
6263 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6264 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6265 }
6266
6267 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6268 {
6269 IEMOP_MNEMONIC(pause, "pause");
6270 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6271 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6272 if (!IEM_IS_IN_GUEST(pVCpu))
6273 { /* probable */ }
6274#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6275 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6276 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6277#endif
6278#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6279 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6280 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6281#endif
6282 }
6283 else
6284 IEMOP_MNEMONIC(nop, "nop");
6285 /** @todo testcase: lock nop; lock pause */
6286 IEM_MC_BEGIN(0, 0, 0, 0);
6287 IEMOP_HLP_DONE_DECODING();
6288 IEM_MC_ADVANCE_RIP_AND_FINISH();
6289 IEM_MC_END();
6290}
6291
6292
6293/**
6294 * @opcode 0x91
6295 */
6296FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
6297{
6298 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
6299 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
6300}
6301
6302
6303/**
6304 * @opcode 0x92
6305 */
6306FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
6307{
6308 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
6309 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
6310}
6311
6312
6313/**
6314 * @opcode 0x93
6315 */
6316FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
6317{
6318 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
6319 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
6320}
6321
6322
6323/**
6324 * @opcode 0x94
6325 */
6326FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6327{
6328 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6329 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6330}
6331
6332
6333/**
6334 * @opcode 0x95
6335 */
6336FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
6337{
6338 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
6339 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
6340}
6341
6342
6343/**
6344 * @opcode 0x96
6345 */
6346FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
6347{
6348 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
6349 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
6350}
6351
6352
6353/**
6354 * @opcode 0x97
6355 */
6356FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
6357{
6358 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
6359 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
6360}
6361
6362
6363/**
6364 * @opcode 0x98
6365 */
6366FNIEMOP_DEF(iemOp_cbw)
6367{
6368 switch (pVCpu->iem.s.enmEffOpSize)
6369 {
6370 case IEMMODE_16BIT:
6371 IEMOP_MNEMONIC(cbw, "cbw");
6372 IEM_MC_BEGIN(0, 1, 0, 0);
6373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6374 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
6375 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
6376 } IEM_MC_ELSE() {
6377 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
6378 } IEM_MC_ENDIF();
6379 IEM_MC_ADVANCE_RIP_AND_FINISH();
6380 IEM_MC_END();
6381 break;
6382
6383 case IEMMODE_32BIT:
6384 IEMOP_MNEMONIC(cwde, "cwde");
6385 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6386 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6387 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6388 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
6389 } IEM_MC_ELSE() {
6390 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
6391 } IEM_MC_ENDIF();
6392 IEM_MC_ADVANCE_RIP_AND_FINISH();
6393 IEM_MC_END();
6394 break;
6395
6396 case IEMMODE_64BIT:
6397 IEMOP_MNEMONIC(cdqe, "cdqe");
6398 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6399 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6400 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6401 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
6402 } IEM_MC_ELSE() {
6403 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
6404 } IEM_MC_ENDIF();
6405 IEM_MC_ADVANCE_RIP_AND_FINISH();
6406 IEM_MC_END();
6407 break;
6408
6409 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6410 }
6411}
6412
6413
6414/**
6415 * @opcode 0x99
6416 */
6417FNIEMOP_DEF(iemOp_cwd)
6418{
6419 switch (pVCpu->iem.s.enmEffOpSize)
6420 {
6421 case IEMMODE_16BIT:
6422 IEMOP_MNEMONIC(cwd, "cwd");
6423 IEM_MC_BEGIN(0, 1, 0, 0);
6424 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6425 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
6426 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
6427 } IEM_MC_ELSE() {
6428 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
6429 } IEM_MC_ENDIF();
6430 IEM_MC_ADVANCE_RIP_AND_FINISH();
6431 IEM_MC_END();
6432 break;
6433
6434 case IEMMODE_32BIT:
6435 IEMOP_MNEMONIC(cdq, "cdq");
6436 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
6437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6438 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
6439 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
6440 } IEM_MC_ELSE() {
6441 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
6442 } IEM_MC_ENDIF();
6443 IEM_MC_ADVANCE_RIP_AND_FINISH();
6444 IEM_MC_END();
6445 break;
6446
6447 case IEMMODE_64BIT:
6448 IEMOP_MNEMONIC(cqo, "cqo");
6449 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
6450 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6451 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
6452 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
6453 } IEM_MC_ELSE() {
6454 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
6455 } IEM_MC_ENDIF();
6456 IEM_MC_ADVANCE_RIP_AND_FINISH();
6457 IEM_MC_END();
6458 break;
6459
6460 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6461 }
6462}
6463
6464
6465/**
6466 * @opcode 0x9a
6467 */
6468FNIEMOP_DEF(iemOp_call_Ap)
6469{
6470 IEMOP_MNEMONIC(call_Ap, "call Ap");
6471 IEMOP_HLP_NO_64BIT();
6472
6473 /* Decode the far pointer address and pass it on to the far call C implementation. */
6474 uint32_t off32Seg;
6475 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
6476 IEM_OPCODE_GET_NEXT_U32(&off32Seg);
6477 else
6478 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
6479 uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
6480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6481 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
6482 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
6483 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
6484 /** @todo make task-switches, ring-switches, ++ return non-zero status */
6485}
6486
6487
/** Opcode 0x9b.  (aka fwait)
 *
 * WAIT/FWAIT - checks for pending x87 exceptions (and CR0.MP/TS device-not-
 * available conditions) before continuing; otherwise a no-op.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6499
6500
6501/**
6502 * @opcode 0x9c
6503 */
6504FNIEMOP_DEF(iemOp_pushf_Fv)
6505{
6506 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
6507 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6508 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6509 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6510 iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
6511}
6512
6513
6514/**
6515 * @opcode 0x9d
6516 */
6517FNIEMOP_DEF(iemOp_popf_Fv)
6518{
6519 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
6520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6521 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6522 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
6523 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
6524 iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
6525}
6526
6527
6528/**
6529 * @opcode 0x9e
6530 * @opflmodify cf,pf,af,zf,sf
6531 */
6532FNIEMOP_DEF(iemOp_sahf)
6533{
6534 IEMOP_MNEMONIC(sahf, "sahf");
6535 if ( IEM_IS_64BIT_CODE(pVCpu)
6536 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6537 IEMOP_RAISE_INVALID_OPCODE_RET();
6538 IEM_MC_BEGIN(0, 2, 0, 0);
6539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6540 IEM_MC_LOCAL(uint32_t, u32Flags);
6541 IEM_MC_LOCAL(uint32_t, EFlags);
6542 IEM_MC_FETCH_EFLAGS(EFlags);
6543 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
6544 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
6545 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
6546 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
6547 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
6548 IEM_MC_COMMIT_EFLAGS(EFlags);
6549 IEM_MC_ADVANCE_RIP_AND_FINISH();
6550 IEM_MC_END();
6551}
6552
6553
6554/**
6555 * @opcode 0x9f
6556 * @opfltest cf,pf,af,zf,sf
6557 */
6558FNIEMOP_DEF(iemOp_lahf)
6559{
6560 IEMOP_MNEMONIC(lahf, "lahf");
6561 if ( IEM_IS_64BIT_CODE(pVCpu)
6562 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
6563 IEMOP_RAISE_INVALID_OPCODE_RET();
6564 IEM_MC_BEGIN(0, 1, 0, 0);
6565 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6566 IEM_MC_LOCAL(uint8_t, u8Flags);
6567 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
6568 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
6569 IEM_MC_ADVANCE_RIP_AND_FINISH();
6570 IEM_MC_END();
6571}
6572
6573
6574/**
6575 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
6576 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
6577 * Will return/throw on failures.
6578 * @param a_GCPtrMemOff The variable to store the offset in.
6579 */
6580#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
6581 do \
6582 { \
6583 switch (pVCpu->iem.s.enmEffAddrMode) \
6584 { \
6585 case IEMMODE_16BIT: \
6586 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
6587 break; \
6588 case IEMMODE_32BIT: \
6589 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
6590 break; \
6591 case IEMMODE_64BIT: \
6592 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
6593 break; \
6594 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
6595 } \
6596 } while (0)
6597
6598/**
6599 * @opcode 0xa0
6600 */
6601FNIEMOP_DEF(iemOp_mov_AL_Ob)
6602{
6603 /*
6604 * Get the offset.
6605 */
6606 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
6607 RTGCPTR GCPtrMemOffDecode;
6608 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6609
6610 /*
6611 * Fetch AL.
6612 */
6613 IEM_MC_BEGIN(0, 2, 0, 0);
6614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6615 IEM_MC_LOCAL(uint8_t, u8Tmp);
6616 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6617 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6618 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
6619 IEM_MC_ADVANCE_RIP_AND_FINISH();
6620 IEM_MC_END();
6621}
6622
6623
6624/**
6625 * @opcode 0xa1
6626 */
6627FNIEMOP_DEF(iemOp_mov_rAX_Ov)
6628{
6629 /*
6630 * Get the offset.
6631 */
6632 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
6633 RTGCPTR GCPtrMemOffDecode;
6634 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6635
6636 /*
6637 * Fetch rAX.
6638 */
6639 switch (pVCpu->iem.s.enmEffOpSize)
6640 {
6641 case IEMMODE_16BIT:
6642 IEM_MC_BEGIN(0, 2, 0, 0);
6643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6644 IEM_MC_LOCAL(uint16_t, u16Tmp);
6645 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6646 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6647 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
6648 IEM_MC_ADVANCE_RIP_AND_FINISH();
6649 IEM_MC_END();
6650 break;
6651
6652 case IEMMODE_32BIT:
6653 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6654 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6655 IEM_MC_LOCAL(uint32_t, u32Tmp);
6656 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6657 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6658 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
6659 IEM_MC_ADVANCE_RIP_AND_FINISH();
6660 IEM_MC_END();
6661 break;
6662
6663 case IEMMODE_64BIT:
6664 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6665 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6666 IEM_MC_LOCAL(uint64_t, u64Tmp);
6667 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6668 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
6669 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
6670 IEM_MC_ADVANCE_RIP_AND_FINISH();
6671 IEM_MC_END();
6672 break;
6673
6674 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6675 }
6676}
6677
6678
6679/**
6680 * @opcode 0xa2
6681 */
6682FNIEMOP_DEF(iemOp_mov_Ob_AL)
6683{
6684 /*
6685 * Get the offset.
6686 */
6687 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
6688 RTGCPTR GCPtrMemOffDecode;
6689 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6690
6691 /*
6692 * Store AL.
6693 */
6694 IEM_MC_BEGIN(0, 2, 0, 0);
6695 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6696 IEM_MC_LOCAL(uint8_t, u8Tmp);
6697 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
6698 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6699 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
6700 IEM_MC_ADVANCE_RIP_AND_FINISH();
6701 IEM_MC_END();
6702}
6703
6704
6705/**
6706 * @opcode 0xa3
6707 */
6708FNIEMOP_DEF(iemOp_mov_Ov_rAX)
6709{
6710 /*
6711 * Get the offset.
6712 */
6713 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
6714 RTGCPTR GCPtrMemOffDecode;
6715 IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);
6716
6717 /*
6718 * Store rAX.
6719 */
6720 switch (pVCpu->iem.s.enmEffOpSize)
6721 {
6722 case IEMMODE_16BIT:
6723 IEM_MC_BEGIN(0, 2, 0, 0);
6724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6725 IEM_MC_LOCAL(uint16_t, u16Tmp);
6726 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
6727 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6728 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
6729 IEM_MC_ADVANCE_RIP_AND_FINISH();
6730 IEM_MC_END();
6731 break;
6732
6733 case IEMMODE_32BIT:
6734 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
6735 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6736 IEM_MC_LOCAL(uint32_t, u32Tmp);
6737 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
6738 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6739 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
6740 IEM_MC_ADVANCE_RIP_AND_FINISH();
6741 IEM_MC_END();
6742 break;
6743
6744 case IEMMODE_64BIT:
6745 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
6746 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6747 IEM_MC_LOCAL(uint64_t, u64Tmp);
6748 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
6749 IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
6750 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
6751 IEM_MC_ADVANCE_RIP_AND_FINISH();
6752 IEM_MC_END();
6753 break;
6754
6755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6756 }
6757}
6758
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits the microcode for one non-REP MOVS step: loads from DS(/override):rSI,
 * stores to ES:rDI, then advances (or retreats, when EFLAGS.DF is set) both
 * index registers by the element size (ValBits / 8).  The index registers are
 * read zero extended to 64 bits according to the address size (AddrBits).
 */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
        IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END() \
6778
6779/**
6780 * @opcode 0xa4
6781 * @opfltest df
6782 */
6783FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
6784{
6785 /*
6786 * Use the C implementation if a repeat prefix is encountered.
6787 */
6788 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6789 {
6790 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
6791 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6792 switch (pVCpu->iem.s.enmEffAddrMode)
6793 {
6794 case IEMMODE_16BIT:
6795 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6796 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6797 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6798 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6799 iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
6800 case IEMMODE_32BIT:
6801 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6802 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6803 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6804 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6805 iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
6806 case IEMMODE_64BIT:
6807 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6808 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6809 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6810 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6811 iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
6812 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6813 }
6814 }
6815
6816 /*
6817 * Sharing case implementation with movs[wdq] below.
6818 */
6819 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
6820 switch (pVCpu->iem.s.enmEffAddrMode)
6821 {
6822 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
6823 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
6824 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
6825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6826 }
6827}
6828
6829
6830/**
6831 * @opcode 0xa5
6832 * @opfltest df
6833 */
6834FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
6835{
6836
6837 /*
6838 * Use the C implementation if a repeat prefix is encountered.
6839 */
6840 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
6841 {
6842 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
6843 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6844 switch (pVCpu->iem.s.enmEffOpSize)
6845 {
6846 case IEMMODE_16BIT:
6847 switch (pVCpu->iem.s.enmEffAddrMode)
6848 {
6849 case IEMMODE_16BIT:
6850 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6851 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6852 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6853 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6854 iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
6855 case IEMMODE_32BIT:
6856 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6857 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6858 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6859 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6860 iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
6861 case IEMMODE_64BIT:
6862 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6863 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6864 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6865 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6866 iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
6867 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6868 }
6869 break;
6870 case IEMMODE_32BIT:
6871 switch (pVCpu->iem.s.enmEffAddrMode)
6872 {
6873 case IEMMODE_16BIT:
6874 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6875 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6876 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6877 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6878 iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
6879 case IEMMODE_32BIT:
6880 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6881 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6882 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6883 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6884 iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
6885 case IEMMODE_64BIT:
6886 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6887 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6888 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6889 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6890 iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
6891 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6892 }
6893 case IEMMODE_64BIT:
6894 switch (pVCpu->iem.s.enmEffAddrMode)
6895 {
6896 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
6897 case IEMMODE_32BIT:
6898 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6899 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6900 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6901 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6902 iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
6903 case IEMMODE_64BIT:
6904 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
6905 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6906 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6907 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6908 iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
6909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6910 }
6911 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6912 }
6913 }
6914
6915 /*
6916 * Annoying double switch here.
6917 * Using ugly macro for implementing the cases, sharing it with movsb.
6918 */
6919 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
6920 switch (pVCpu->iem.s.enmEffOpSize)
6921 {
6922 case IEMMODE_16BIT:
6923 switch (pVCpu->iem.s.enmEffAddrMode)
6924 {
6925 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
6926 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
6927 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
6928 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6929 }
6930 break;
6931
6932 case IEMMODE_32BIT:
6933 switch (pVCpu->iem.s.enmEffAddrMode)
6934 {
6935 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
6936 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
6937 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
6938 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6939 }
6940 break;
6941
6942 case IEMMODE_64BIT:
6943 switch (pVCpu->iem.s.enmEffAddrMode)
6944 {
6945 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
6946 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
6947 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
6948 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6949 }
6950 break;
6951 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6952 }
6953}
6954
6955#undef IEM_MOVS_CASE
6956
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the microcode for one non-repeated CMPS variant: fetches the first
 * operand from [iEffSeg:xSI] and the second from [ES:xDI], compares them via
 * iemAImpl_cmp_uNN (which updates EFLAGS), then steps xSI and xDI forwards
 * or backwards by the operand size depending on EFL.DF.
 *
 * Only slash-star comments may appear inside the body - a line comment would
 * swallow the backslash line continuations.
 *
 * @param   ValBits     The operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    The effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags to pass to IEM_MC_BEGIN.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr1); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
    \
    IEM_MC_LOCAL(RTGCPTR, uAddr2); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
    \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6986
6987/**
6988 * @opcode 0xa6
6989 * @opflclass arithmetic
6990 * @opfltest df
6991 */
6992FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6993{
6994
6995 /*
6996 * Use the C implementation if a repeat prefix is encountered.
6997 */
6998 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6999 {
7000 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
7001 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7002 switch (pVCpu->iem.s.enmEffAddrMode)
7003 {
7004 case IEMMODE_16BIT:
7005 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7006 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7007 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7008 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7009 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7010 case IEMMODE_32BIT:
7011 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7012 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7013 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7014 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7015 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7016 case IEMMODE_64BIT:
7017 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7018 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7019 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7020 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7021 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7022 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7023 }
7024 }
7025 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7026 {
7027 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7029 switch (pVCpu->iem.s.enmEffAddrMode)
7030 {
7031 case IEMMODE_16BIT:
7032 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7033 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7034 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7035 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7036 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7037 case IEMMODE_32BIT:
7038 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7039 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7040 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7041 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7042 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7043 case IEMMODE_64BIT:
7044 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7045 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7046 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7047 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7048 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7049 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7050 }
7051 }
7052
7053 /*
7054 * Sharing case implementation with cmps[wdq] below.
7055 */
7056 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7057 switch (pVCpu->iem.s.enmEffAddrMode)
7058 {
7059 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7060 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7061 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7062 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7063 }
7064}
7065
7066
7067/**
7068 * @opcode 0xa7
7069 * @opflclass arithmetic
7070 * @opfltest df
7071 */
7072FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7073{
7074 /*
7075 * Use the C implementation if a repeat prefix is encountered.
7076 */
7077 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7078 {
7079 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7081 switch (pVCpu->iem.s.enmEffOpSize)
7082 {
7083 case IEMMODE_16BIT:
7084 switch (pVCpu->iem.s.enmEffAddrMode)
7085 {
7086 case IEMMODE_16BIT:
7087 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7088 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7089 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7090 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7091 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7092 case IEMMODE_32BIT:
7093 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7094 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7095 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7096 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7097 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7098 case IEMMODE_64BIT:
7099 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7100 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7101 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7102 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7103 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7104 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7105 }
7106 break;
7107 case IEMMODE_32BIT:
7108 switch (pVCpu->iem.s.enmEffAddrMode)
7109 {
7110 case IEMMODE_16BIT:
7111 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7112 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7113 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7114 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7115 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7116 case IEMMODE_32BIT:
7117 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7118 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7119 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7120 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7121 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7122 case IEMMODE_64BIT:
7123 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7124 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7125 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7126 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7127 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7128 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7129 }
7130 case IEMMODE_64BIT:
7131 switch (pVCpu->iem.s.enmEffAddrMode)
7132 {
7133 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7134 case IEMMODE_32BIT:
7135 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7136 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7137 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7138 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7139 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7140 case IEMMODE_64BIT:
7141 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7142 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7143 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7144 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7145 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7147 }
7148 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7149 }
7150 }
7151
7152 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7153 {
7154 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7156 switch (pVCpu->iem.s.enmEffOpSize)
7157 {
7158 case IEMMODE_16BIT:
7159 switch (pVCpu->iem.s.enmEffAddrMode)
7160 {
7161 case IEMMODE_16BIT:
7162 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7163 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7164 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7165 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7166 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7167 case IEMMODE_32BIT:
7168 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7169 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7170 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7171 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7172 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7173 case IEMMODE_64BIT:
7174 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7175 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7176 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7177 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7178 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7179 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7180 }
7181 break;
7182 case IEMMODE_32BIT:
7183 switch (pVCpu->iem.s.enmEffAddrMode)
7184 {
7185 case IEMMODE_16BIT:
7186 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7187 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7188 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7189 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7190 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7191 case IEMMODE_32BIT:
7192 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7193 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7194 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7195 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7196 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7197 case IEMMODE_64BIT:
7198 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7199 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7200 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7201 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7202 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7204 }
7205 case IEMMODE_64BIT:
7206 switch (pVCpu->iem.s.enmEffAddrMode)
7207 {
7208 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7209 case IEMMODE_32BIT:
7210 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7211 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7212 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7213 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7214 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7215 case IEMMODE_64BIT:
7216 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7217 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7218 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7219 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7220 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7221 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7222 }
7223 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7224 }
7225 }
7226
7227 /*
7228 * Annoying double switch here.
7229 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7230 */
7231 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7232 switch (pVCpu->iem.s.enmEffOpSize)
7233 {
7234 case IEMMODE_16BIT:
7235 switch (pVCpu->iem.s.enmEffAddrMode)
7236 {
7237 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7238 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7239 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7241 }
7242 break;
7243
7244 case IEMMODE_32BIT:
7245 switch (pVCpu->iem.s.enmEffAddrMode)
7246 {
7247 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7248 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7249 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7251 }
7252 break;
7253
7254 case IEMMODE_64BIT:
7255 switch (pVCpu->iem.s.enmEffAddrMode)
7256 {
7257 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7258 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7259 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7260 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7261 }
7262 break;
7263 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7264 }
7265}
7266
7267#undef IEM_CMPS_CASE
7268
7269/**
7270 * @opcode 0xa8
7271 * @opflclass logical
7272 */
7273FNIEMOP_DEF(iemOp_test_AL_Ib)
7274{
7275 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7276 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7277 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
7278}
7279
7280
7281/**
7282 * @opcode 0xa9
7283 * @opflclass logical
7284 */
7285FNIEMOP_DEF(iemOp_test_eAX_Iz)
7286{
7287 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7288 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7289 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
7290}
7291
7292
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the microcode for one non-repeated STOS variant: stores the low
 * ValBits of xAX to [ES:xDI] and then steps xDI forwards or backwards by
 * the operand size depending on EFL.DF.
 *
 * @param   ValBits     The operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    The effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags to pass to IEM_MC_BEGIN.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7309
7310/**
7311 * @opcode 0xaa
7312 */
7313FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7314{
7315 /*
7316 * Use the C implementation if a repeat prefix is encountered.
7317 */
7318 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7319 {
7320 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7322 switch (pVCpu->iem.s.enmEffAddrMode)
7323 {
7324 case IEMMODE_16BIT:
7325 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7326 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7327 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7328 iemCImpl_stos_al_m16);
7329 case IEMMODE_32BIT:
7330 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7331 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7332 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7333 iemCImpl_stos_al_m32);
7334 case IEMMODE_64BIT:
7335 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7336 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7337 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7338 iemCImpl_stos_al_m64);
7339 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7340 }
7341 }
7342
7343 /*
7344 * Sharing case implementation with stos[wdq] below.
7345 */
7346 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7347 switch (pVCpu->iem.s.enmEffAddrMode)
7348 {
7349 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7350 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7351 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7353 }
7354}
7355
7356
7357/**
7358 * @opcode 0xab
7359 */
7360FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7361{
7362 /*
7363 * Use the C implementation if a repeat prefix is encountered.
7364 */
7365 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7366 {
7367 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7368 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7369 switch (pVCpu->iem.s.enmEffOpSize)
7370 {
7371 case IEMMODE_16BIT:
7372 switch (pVCpu->iem.s.enmEffAddrMode)
7373 {
7374 case IEMMODE_16BIT:
7375 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7376 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7377 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7378 iemCImpl_stos_ax_m16);
7379 case IEMMODE_32BIT:
7380 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7381 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7382 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7383 iemCImpl_stos_ax_m32);
7384 case IEMMODE_64BIT:
7385 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7386 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7387 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7388 iemCImpl_stos_ax_m64);
7389 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7390 }
7391 break;
7392 case IEMMODE_32BIT:
7393 switch (pVCpu->iem.s.enmEffAddrMode)
7394 {
7395 case IEMMODE_16BIT:
7396 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7397 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7398 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7399 iemCImpl_stos_eax_m16);
7400 case IEMMODE_32BIT:
7401 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7402 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7403 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7404 iemCImpl_stos_eax_m32);
7405 case IEMMODE_64BIT:
7406 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7407 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7408 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7409 iemCImpl_stos_eax_m64);
7410 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7411 }
7412 case IEMMODE_64BIT:
7413 switch (pVCpu->iem.s.enmEffAddrMode)
7414 {
7415 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7416 case IEMMODE_32BIT:
7417 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7418 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7419 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7420 iemCImpl_stos_rax_m32);
7421 case IEMMODE_64BIT:
7422 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7423 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7424 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7425 iemCImpl_stos_rax_m64);
7426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7427 }
7428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7429 }
7430 }
7431
7432 /*
7433 * Annoying double switch here.
7434 * Using ugly macro for implementing the cases, sharing it with stosb.
7435 */
7436 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7437 switch (pVCpu->iem.s.enmEffOpSize)
7438 {
7439 case IEMMODE_16BIT:
7440 switch (pVCpu->iem.s.enmEffAddrMode)
7441 {
7442 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7443 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7444 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7446 }
7447 break;
7448
7449 case IEMMODE_32BIT:
7450 switch (pVCpu->iem.s.enmEffAddrMode)
7451 {
7452 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7453 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7454 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7455 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7456 }
7457 break;
7458
7459 case IEMMODE_64BIT:
7460 switch (pVCpu->iem.s.enmEffAddrMode)
7461 {
7462 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7463 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7464 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7466 }
7467 break;
7468 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7469 }
7470}
7471
7472#undef IEM_STOS_CASE
7473
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the microcode for one non-repeated LODS variant: loads ValBits
 * worth of data from [iEffSeg:xSI] into xAX and then steps xSI forwards or
 * backwards by the operand size depending on EFL.DF.
 *
 * @param   ValBits     The operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    The effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags to pass to IEM_MC_BEGIN.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7490
7491/**
7492 * @opcode 0xac
7493 * @opfltest df
7494 */
7495FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7496{
7497 /*
7498 * Use the C implementation if a repeat prefix is encountered.
7499 */
7500 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7501 {
7502 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7504 switch (pVCpu->iem.s.enmEffAddrMode)
7505 {
7506 case IEMMODE_16BIT:
7507 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7508 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7509 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7510 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7511 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7512 case IEMMODE_32BIT:
7513 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7514 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7515 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7516 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7517 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7518 case IEMMODE_64BIT:
7519 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7520 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7521 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7522 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7523 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7524 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7525 }
7526 }
7527
7528 /*
7529 * Sharing case implementation with stos[wdq] below.
7530 */
7531 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7532 switch (pVCpu->iem.s.enmEffAddrMode)
7533 {
7534 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7535 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7536 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7537 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7538 }
7539}
7540
7541
7542/**
7543 * @opcode 0xad
7544 * @opfltest df
7545 */
7546FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7547{
7548 /*
7549 * Use the C implementation if a repeat prefix is encountered.
7550 */
7551 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7552 {
7553 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7554 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7555 switch (pVCpu->iem.s.enmEffOpSize)
7556 {
7557 case IEMMODE_16BIT:
7558 switch (pVCpu->iem.s.enmEffAddrMode)
7559 {
7560 case IEMMODE_16BIT:
7561 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7562 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7563 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7564 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7565 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7566 case IEMMODE_32BIT:
7567 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7568 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7569 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7570 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7571 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7572 case IEMMODE_64BIT:
7573 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7574 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7575 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7576 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7577 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7579 }
7580 break;
7581 case IEMMODE_32BIT:
7582 switch (pVCpu->iem.s.enmEffAddrMode)
7583 {
7584 case IEMMODE_16BIT:
7585 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7586 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7587 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7588 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7589 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7590 case IEMMODE_32BIT:
7591 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7592 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7593 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7594 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7595 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7596 case IEMMODE_64BIT:
7597 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7598 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7599 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7600 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7601 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7602 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7603 }
7604 case IEMMODE_64BIT:
7605 switch (pVCpu->iem.s.enmEffAddrMode)
7606 {
7607 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7608 case IEMMODE_32BIT:
7609 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7610 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7611 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7612 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7613 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7614 case IEMMODE_64BIT:
7615 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7616 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7617 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7618 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7619 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7620 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7621 }
7622 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7623 }
7624 }
7625
7626 /*
7627 * Annoying double switch here.
7628 * Using ugly macro for implementing the cases, sharing it with lodsb.
7629 */
7630 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7631 switch (pVCpu->iem.s.enmEffOpSize)
7632 {
7633 case IEMMODE_16BIT:
7634 switch (pVCpu->iem.s.enmEffAddrMode)
7635 {
7636 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7637 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7638 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7640 }
7641 break;
7642
7643 case IEMMODE_32BIT:
7644 switch (pVCpu->iem.s.enmEffAddrMode)
7645 {
7646 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7647 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7648 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7650 }
7651 break;
7652
7653 case IEMMODE_64BIT:
7654 switch (pVCpu->iem.s.enmEffAddrMode)
7655 {
7656 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7657 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7658 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7659 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7660 }
7661 break;
7662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7663 }
7664}
7665
7666#undef IEM_LODS_CASE
7667
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the microcode for one non-repeated SCAS variant: compares xAX
 * (ValBits worth) with the value at [ES:xDI] via iemAImpl_cmp_uNN (which
 * updates EFLAGS), then steps xDI forwards or backwards by the operand
 * size depending on EFL.DF.
 *
 * @param   ValBits     The operand size in bits (8, 16, 32 or 64).
 * @param   AddrBits    The effective address size in bits (16, 32 or 64).
 * @param   a_fMcFlags  IEM_MC_F_XXX flags to pass to IEM_MC_BEGIN.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7690
7691/**
7692 * @opcode 0xae
7693 * @opflclass arithmetic
7694 * @opfltest df
7695 */
7696FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7697{
7698 /*
7699 * Use the C implementation if a repeat prefix is encountered.
7700 */
7701 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7702 {
7703 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7704 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7705 switch (pVCpu->iem.s.enmEffAddrMode)
7706 {
7707 case IEMMODE_16BIT:
7708 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7709 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7710 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7711 iemCImpl_repe_scas_al_m16);
7712 case IEMMODE_32BIT:
7713 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7714 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7715 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7716 iemCImpl_repe_scas_al_m32);
7717 case IEMMODE_64BIT:
7718 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7719 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7720 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7721 iemCImpl_repe_scas_al_m64);
7722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7723 }
7724 }
7725 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7726 {
7727 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7729 switch (pVCpu->iem.s.enmEffAddrMode)
7730 {
7731 case IEMMODE_16BIT:
7732 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7733 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7734 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7735 iemCImpl_repne_scas_al_m16);
7736 case IEMMODE_32BIT:
7737 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7738 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7739 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7740 iemCImpl_repne_scas_al_m32);
7741 case IEMMODE_64BIT:
7742 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7743 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7744 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7745 iemCImpl_repne_scas_al_m64);
7746 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7747 }
7748 }
7749
7750 /*
7751 * Sharing case implementation with stos[wdq] below.
7752 */
7753 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7754 switch (pVCpu->iem.s.enmEffAddrMode)
7755 {
7756 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7757 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7758 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7759 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7760 }
7761}
7762
7763
/**
 * @opcode 0xaf
 * @opflclass arithmetic
 * @opfltest df
 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     *
     * Outer switch is on the effective operand size, inner on the effective
     * address size; each combination defers to a dedicated C implementation.
     * The register mask passed to IEM_MC_DEFER_TO_CIMPL_0_RET names rDI and
     * rCX - presumably the guest registers the rep-scas helpers write back
     * (TODO(review): confirm against the IEM_MC_DEFER_TO_CIMPL_0_RET
     * definition).  None of the inner cases need 'break' since the deferral
     * macro returns.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no 'break' needed: every inner case above returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Same double-switch structure as the REPZ branch above, deferring to
           the repne_ C implementations instead. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no 'break' needed: every inner case above returns. */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7946
7947#undef IEM_SCAS_CASE
7948
/**
 * Common 'mov r8, imm8' helper.
 *
 * Fetches the imm8 operand and stores it into the given fixed 8-bit
 * register.
 *
 * @param   iFixedReg   Register index with REX.B already folded in by the
 *                      caller.  Indexes 4-7 select AH/CH/DH/BH when no REX
 *                      prefix is present (see the 0xb4..0xb7 callers).
 */
FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
7961
7962
7963/**
7964 * @opcode 0xb0
7965 */
7966FNIEMOP_DEF(iemOp_mov_AL_Ib)
7967{
7968 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
7969 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
7970}
7971
7972
7973/**
7974 * @opcode 0xb1
7975 */
7976FNIEMOP_DEF(iemOp_CL_Ib)
7977{
7978 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
7979 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
7980}
7981
7982
7983/**
7984 * @opcode 0xb2
7985 */
7986FNIEMOP_DEF(iemOp_DL_Ib)
7987{
7988 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
7989 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
7990}
7991
7992
7993/**
7994 * @opcode 0xb3
7995 */
7996FNIEMOP_DEF(iemOp_BL_Ib)
7997{
7998 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
7999 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8000}
8001
8002
8003/**
8004 * @opcode 0xb4
8005 */
8006FNIEMOP_DEF(iemOp_mov_AH_Ib)
8007{
8008 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
8009 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8010}
8011
8012
8013/**
8014 * @opcode 0xb5
8015 */
8016FNIEMOP_DEF(iemOp_CH_Ib)
8017{
8018 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
8019 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8020}
8021
8022
8023/**
8024 * @opcode 0xb6
8025 */
8026FNIEMOP_DEF(iemOp_DH_Ib)
8027{
8028 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
8029 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8030}
8031
8032
8033/**
8034 * @opcode 0xb7
8035 */
8036FNIEMOP_DEF(iemOp_BH_Ib)
8037{
8038 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
8039 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8040}
8041
8042
/**
 * Common 'mov regX,immX' helper.
 *
 * Fetches an operand-size immediate and stores it into the given fixed
 * register.  The immediate is fetched inside the IEM_MC block because its
 * width depends on the effective operand size.
 *
 * @param   iFixedReg   Register index with REX.B already folded in by the
 *                      caller.
 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8079
8080
8081/**
8082 * @opcode 0xb8
8083 */
8084FNIEMOP_DEF(iemOp_eAX_Iv)
8085{
8086 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
8087 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
8088}
8089
8090
8091/**
8092 * @opcode 0xb9
8093 */
8094FNIEMOP_DEF(iemOp_eCX_Iv)
8095{
8096 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
8097 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
8098}
8099
8100
8101/**
8102 * @opcode 0xba
8103 */
8104FNIEMOP_DEF(iemOp_eDX_Iv)
8105{
8106 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
8107 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
8108}
8109
8110
8111/**
8112 * @opcode 0xbb
8113 */
8114FNIEMOP_DEF(iemOp_eBX_Iv)
8115{
8116 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
8117 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
8118}
8119
8120
8121/**
8122 * @opcode 0xbc
8123 */
8124FNIEMOP_DEF(iemOp_eSP_Iv)
8125{
8126 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
8127 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
8128}
8129
8130
8131/**
8132 * @opcode 0xbd
8133 */
8134FNIEMOP_DEF(iemOp_eBP_Iv)
8135{
8136 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
8137 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
8138}
8139
8140
8141/**
8142 * @opcode 0xbe
8143 */
8144FNIEMOP_DEF(iemOp_eSI_Iv)
8145{
8146 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
8147 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
8148}
8149
8150
8151/**
8152 * @opcode 0xbf
8153 */
8154FNIEMOP_DEF(iemOp_eDI_Iv)
8155{
8156 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
8157 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
8158}
8159
8160
/**
 * @opcode 0xc0
 *
 * Group 2 Eb,Ib: byte shifts/rotates with an imm8 count.  The sub-opcode in
 * ModR/M.reg selects the operation; /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186(); /* imm8-count shifts were introduced with the 186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /* Need to use a body macro here since the EFLAGS behaviour differs between
       the shifts, rotates and rotate w/ carry. Sigh.
       Note the decode order: in the memory form the effective address (and any
       displacement) must be consumed before the trailing imm8 count byte. */
#define GRP2_BODY_Eb_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory */ \
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
        \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        \
        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        \
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
        IEM_MC_FETCH_EFLAGS(EFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
        \
        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
        IEM_MC_COMMIT_EFLAGS(EFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } (void)0

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /0
         * @opflclass rotate_count
         */
        case 0:
        {
            IEMOP_MNEMONIC2(MI, ROL, rol, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /1
         * @opflclass rotate_count
         */
        case 1:
        {
            IEMOP_MNEMONIC2(MI, ROR, ror, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /2
         * @opflclass rotate_carry_count
         */
        case 2:
        {
            IEMOP_MNEMONIC2(MI, RCL, rcl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /3
         * @opflclass rotate_carry_count
         */
        case 3:
        {
            IEMOP_MNEMONIC2(MI, RCR, rcr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /4
         * @opflclass shift_count
         */
        case 4:
        {
            IEMOP_MNEMONIC2(MI, SHL, shl, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /5
         * @opflclass shift_count
         */
        case 5:
        {
            IEMOP_MNEMONIC2(MI, SHR, shr, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
            break;
        }
        /**
         * @opdone
         * @opmaps grp2_c0
         * @opcode /7
         * @opflclass shift_count
         */
        case 7:
        {
            IEMOP_MNEMONIC2(MI, SAR, sar, Eb, Ib, DISOPTYPE_HARMLESS, 0);
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
            GRP2_BODY_Eb_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
            break;
        }

        /** @opdone */
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
#undef GRP2_BODY_Eb_Ib
}
8313
8314
/* Need to use a body macro here since the EFLAGS behaviour differs between
   the shifts, rotates and rotate w/ carry. Sigh.
   Shared body for the 0xc1 group 2 Ev,Ib workers below: the register form
   fetches the imm8 count straight away, whereas the memory form calculates
   the effective address first since the imm8 follows the ModR/M (and any
   displacement) bytes. */
#define GRP2_BODY_Ev_Ib(a_pImplExpr) \
    PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register */ \
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 3, 0, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                \
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                \
                IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                \
                IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1); \
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                IEM_MC_FETCH_EFLAGS(EFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
                \
                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                IEM_MC_COMMIT_EFLAGS(EFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
8445
/**
 * @opmaps grp2_c1
 * @opcode /0
 * @opflclass rotate_count
 *
 * ROL Ev,Ib - rotate left by imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROL, rol, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Pick the EFLAGS-behaviour specific rol worker table for the target CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
}
8456
8457
/**
 * @opmaps grp2_c1
 * @opcode /1
 * @opflclass rotate_count
 *
 * ROR Ev,Ib - rotate right by imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, ROR, ror, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Pick the EFLAGS-behaviour specific ror worker table for the target CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
}
8468
8469
/**
 * @opmaps grp2_c1
 * @opcode /2
 * @opflclass rotate_carry_count
 *
 * RCL Ev,Ib - rotate left through carry by imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCL, rcl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Pick the EFLAGS-behaviour specific rcl worker table for the target CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
}
8480
8481
/**
 * @opmaps grp2_c1
 * @opcode /3
 * @opflclass rotate_carry_count
 *
 * RCR Ev,Ib - rotate right through carry by imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, RCR, rcr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Pick the EFLAGS-behaviour specific rcr worker table for the target CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
}
8492
8493
/**
 * @opmaps grp2_c1
 * @opcode /4
 * @opflclass shift_count
 *
 * SHL Ev,Ib - shift left by imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHL, shl, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Pick the EFLAGS-behaviour specific shl worker table for the target CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
}
8504
8505
/**
 * @opmaps grp2_c1
 * @opcode /5
 * @opflclass shift_count
 *
 * SHR Ev,Ib - logical shift right by imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SHR, shr, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Pick the EFLAGS-behaviour specific shr worker table for the target CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
}
8516
8517
/**
 * @opmaps grp2_c1
 * @opcode /7
 * @opflclass shift_count
 *
 * SAR Ev,Ib - arithmetic shift right by imm8 count.
 */
FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC2(MI, SAR, sar, Ev, Ib, DISOPTYPE_HARMLESS, 0);
    /* Pick the EFLAGS-behaviour specific sar worker table for the target CPU. */
    GRP2_BODY_Ev_Ib(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
}
8528
8529#undef GRP2_BODY_Ev_Ib
8530
/**
 * @opcode 0xc1
 *
 * Group 2 Ev,Ib dispatcher: the sub-opcode in ModR/M.reg selects the shift or
 * rotate worker; /6 is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186(); /* imm8-count shifts were introduced with the 186. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_Ib, bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_Ib, bRm);
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is undefined in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
}
8552
8553
/**
 * @opcode 0xc2
 *
 * RETN Iw - near return, popping an additional imm16 bytes off the stack.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); /* bytes to pop after the return address */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defer to the C implementation matching the effective operand size.
       The register mask names rSP - presumably the guest register the return
       helpers write back for the native recompiler. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8577
8578
/**
 * @opcode 0xc3
 *
 * RETN - plain near return.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Defer to the C implementation matching the effective operand size.
       The register mask names rSP - presumably the guest register the return
       helpers write back for the native recompiler. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8601
8602
/**
 * @opcode 0xc4
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode.
       (Comment fixed: this is opcode 0xc4 = LES / 3-byte VEX, not LDS.) */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if (IEM_IS_64BIT_CODE(pVCpu))
            {
#if 1
                AssertCompile(IEM_OP_PRF_SIZE_REX_W == RT_BIT_32(9));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(bVex2 & 0x80) << (9 - 7);
                AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25) && IEM_OP_PRF_REX_X == RT_BIT_32(26) && IEM_OP_PRF_REX_R == RT_BIT_32(27));
                pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0xe0) << (25 - 5);
#else
                /* NOTE(review): this disabled branch uses IEM_OP_PRF_SIZE_REX_B/X/R
                   where the live branch above uses IEM_OP_PRF_REX_B/X/R - looks
                   like a typo in the dead code; confirm before enabling. */
                if (bVex2 & 0x80 /* VEX.W */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
                if (~bRm & 0x20 /* VEX.~B */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_B;
                if (~bRm & 0x40 /* VEX.~X */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_X;
                if (~bRm & 0x80 /* VEX.~R */)
                    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_R;
#endif
            }
            /* Decode the remaining VEX payload: inverted R/X/B, inverted vvvv,
               vector length L and the implied-prefix selector pp. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* The low five bits of the first payload byte select the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
8688
8689
/**
 * @opcode 0xc5
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form.
       (Comment fixed: this is opcode 0xc5 = LDS / 2-byte VEX, not LES.) */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            AssertCompile(IEM_OP_PRF_REX_R == RT_BIT_32(27));
            pVCpu->iem.s.fPrefixes |= (uint32_t)(~bRm & 0x80) << (27 - 7);
            /* 2-byte VEX payload: ~R, ~vvvv, L and the implied-prefix selector pp. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            /* 2-byte VEX always implies the 0x0f opcode map. */
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
8735
8736
/**
 * @opcode 0xc6
 *
 * Group 11 Eb,Ib: only /0 (mov Eb,Ib) is defined; the other reg values raise
 * \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access.  The effective address must be calculated before the
           trailing imm8 is consumed. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8770
8771
8772/**
8773 * @opcode 0xc7
8774 */
8775FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
8776{
8777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8778 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Iz in this group. */
8779 IEMOP_RAISE_INVALID_OPCODE_RET();
8780 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
8781
8782 if (IEM_IS_MODRM_REG_MODE(bRm))
8783 {
8784 /* register access */
8785 switch (pVCpu->iem.s.enmEffOpSize)
8786 {
8787 case IEMMODE_16BIT:
8788 IEM_MC_BEGIN(0, 0, 0, 0);
8789 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8791 IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
8792 IEM_MC_ADVANCE_RIP_AND_FINISH();
8793 IEM_MC_END();
8794 break;
8795
8796 case IEMMODE_32BIT:
8797 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
8798 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8799 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8800 IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
8801 IEM_MC_ADVANCE_RIP_AND_FINISH();
8802 IEM_MC_END();
8803 break;
8804
8805 case IEMMODE_64BIT:
8806 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
8807 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8809 IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
8810 IEM_MC_ADVANCE_RIP_AND_FINISH();
8811 IEM_MC_END();
8812 break;
8813
8814 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8815 }
8816 }
8817 else
8818 {
8819 /* memory access. */
8820 switch (pVCpu->iem.s.enmEffOpSize)
8821 {
8822 case IEMMODE_16BIT:
8823 IEM_MC_BEGIN(0, 1, 0, 0);
8824 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8825 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
8826 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8828 IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
8829 IEM_MC_ADVANCE_RIP_AND_FINISH();
8830 IEM_MC_END();
8831 break;
8832
8833 case IEMMODE_32BIT:
8834 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
8835 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8836 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8837 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
8838 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8839 IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
8840 IEM_MC_ADVANCE_RIP_AND_FINISH();
8841 IEM_MC_END();
8842 break;
8843
8844 case IEMMODE_64BIT:
8845 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
8846 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8847 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
8848 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
8849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8850 IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
8851 IEM_MC_ADVANCE_RIP_AND_FINISH();
8852 IEM_MC_END();
8853 break;
8854
8855 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8856 }
8857 }
8858}
8859
8860
8861
8862
8863/**
8864 * @opcode 0xc8
8865 */
8866FNIEMOP_DEF(iemOp_enter_Iw_Ib)
8867{
8868 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
8869 IEMOP_HLP_MIN_186();
8870 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8871 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
8872 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
8873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8874 IEM_MC_DEFER_TO_CIMPL_3_RET(0,
8875 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8876 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8877 iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
8878}
8879
8880
8881/**
8882 * @opcode 0xc9
8883 */
8884FNIEMOP_DEF(iemOp_leave)
8885{
8886 IEMOP_MNEMONIC(leave, "leave");
8887 IEMOP_HLP_MIN_186();
8888 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
8889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8890 IEM_MC_DEFER_TO_CIMPL_1_RET(0,
8891 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8892 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
8893 iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
8894}
8895
8896
8897/**
8898 * @opcode 0xca
8899 */
8900FNIEMOP_DEF(iemOp_retf_Iw)
8901{
8902 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
8903 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
8904 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8905 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8906 | IEM_CIMPL_F_MODE,
8907 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8908 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8909 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8910 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8911 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8912 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8913 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8914 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8915 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8916 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8917 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8918 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8919 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
8920 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
8921 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
8922 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
8923 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
8924 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
8925}
8926
8927
8928/**
8929 * @opcode 0xcb
8930 */
8931FNIEMOP_DEF(iemOp_retf)
8932{
8933 IEMOP_MNEMONIC(retf, "retf");
8934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8935 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
8936 | IEM_CIMPL_F_MODE,
8937 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
8938 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
8939 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
8940 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
8941 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
8942 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
8943 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
8944 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
8945 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
8946 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
8947 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
8948 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
8949 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
8950 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
8951 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
8952 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
8953 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
8954 iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
8955}
8956
8957
8958/**
8959 * @opcode 0xcc
8960 */
8961FNIEMOP_DEF(iemOp_int3)
8962{
8963 IEMOP_MNEMONIC(int3, "int3");
8964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8965 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8966 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
8967 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
8968}
8969
8970
8971/**
8972 * @opcode 0xcd
8973 */
8974FNIEMOP_DEF(iemOp_int_Ib)
8975{
8976 IEMOP_MNEMONIC(int_Ib, "int Ib");
8977 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
8978 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8979 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8980 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
8981 iemCImpl_int, u8Int, IEMINT_INTN);
8982 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8983}
8984
8985
8986/**
8987 * @opcode 0xce
8988 */
8989FNIEMOP_DEF(iemOp_into)
8990{
8991 IEMOP_MNEMONIC(into, "into");
8992 IEMOP_HLP_NO_64BIT();
8993 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
8994 | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
8995 UINT64_MAX,
8996 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
8997 /** @todo make task-switches, ring-switches, ++ return non-zero status */
8998}
8999
9000
9001/**
9002 * @opcode 0xcf
9003 */
9004FNIEMOP_DEF(iemOp_iret)
9005{
9006 IEMOP_MNEMONIC(iret, "iret");
9007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9008 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
9009 | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
9010 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
9011 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
9012 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
9013 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
9014 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_DS)
9015 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
9016 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
9017 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
9018 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_ES)
9019 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
9020 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
9021 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
9022 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_FS)
9023 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
9024 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
9025 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS)
9026 | RT_BIT_64(kIemNativeGstReg_SegAttribFirst + X86_SREG_GS),
9027 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
9028 /* Segment registers are sanitized when returning to an outer ring, or fully
9029 reloaded when returning to v86 mode. Thus the large flush list above. */
9030}
9031
9032
9033/**
9034 * @opcode 0xd0
9035 */
9036FNIEMOP_DEF(iemOp_Grp2_Eb_1)
9037{
9038 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9039
9040 /* Need to use a body macro here since the EFLAGS behaviour differs between
9041 the shifts, rotates and rotate w/ carry. Sigh. */
9042#define GRP2_BODY_Eb_1(a_pImplExpr) \
9043 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9044 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9045 { \
9046 /* register */ \
9047 IEM_MC_BEGIN(3, 0, 0, 0); \
9048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9049 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9050 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9051 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9052 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9053 IEM_MC_REF_EFLAGS(pEFlags); \
9054 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9055 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9056 IEM_MC_END(); \
9057 } \
9058 else \
9059 { \
9060 /* memory */ \
9061 IEM_MC_BEGIN(3, 3, 0, 0); \
9062 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9063 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1); \
9064 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9065 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9066 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9067 \
9068 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9069 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9070 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9071 IEM_MC_FETCH_EFLAGS(EFlags); \
9072 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9073 \
9074 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9075 IEM_MC_COMMIT_EFLAGS(EFlags); \
9076 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9077 IEM_MC_END(); \
9078 } (void)0
9079
9080 switch (IEM_GET_MODRM_REG_8(bRm))
9081 {
9082 /**
9083 * @opdone
9084 * @opmaps grp2_d0
9085 * @opcode /0
9086 * @opflclass rotate_1
9087 */
9088 case 0:
9089 {
9090 IEMOP_MNEMONIC2(M1, ROL, rol, Eb, 1, DISOPTYPE_HARMLESS, 0);
9091 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9092 break;
9093 }
9094 /**
9095 * @opdone
9096 * @opmaps grp2_d0
9097 * @opcode /1
9098 * @opflclass rotate_1
9099 */
9100 case 1:
9101 {
9102 IEMOP_MNEMONIC2(M1, ROR, ror, Eb, 1, DISOPTYPE_HARMLESS, 0);
9103 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9104 break;
9105 }
9106 /**
9107 * @opdone
9108 * @opmaps grp2_d0
9109 * @opcode /2
9110 * @opflclass rotate_carry_1
9111 */
9112 case 2:
9113 {
9114 IEMOP_MNEMONIC2(M1, RCL, rcl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9115 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9116 break;
9117 }
9118 /**
9119 * @opdone
9120 * @opmaps grp2_d0
9121 * @opcode /3
9122 * @opflclass rotate_carry_1
9123 */
9124 case 3:
9125 {
9126 IEMOP_MNEMONIC2(M1, RCR, rcr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9127 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9128 break;
9129 }
9130 /**
9131 * @opdone
9132 * @opmaps grp2_d0
9133 * @opcode /4
9134 * @opflclass shift_1
9135 */
9136 case 4:
9137 {
9138 IEMOP_MNEMONIC2(M1, SHL, shl, Eb, 1, DISOPTYPE_HARMLESS, 0);
9139 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9140 break;
9141 }
9142 /**
9143 * @opdone
9144 * @opmaps grp2_d0
9145 * @opcode /5
9146 * @opflclass shift_1
9147 */
9148 case 5:
9149 {
9150 IEMOP_MNEMONIC2(M1, SHR, shr, Eb, 1, DISOPTYPE_HARMLESS, 0);
9151 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9152 break;
9153 }
9154 /**
9155 * @opdone
9156 * @opmaps grp2_d0
9157 * @opcode /7
9158 * @opflclass shift_1
9159 */
9160 case 7:
9161 {
9162 IEMOP_MNEMONIC2(M1, SAR, sar, Eb, 1, DISOPTYPE_HARMLESS, 0);
9163 GRP2_BODY_Eb_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9164 break;
9165 }
9166 /** @opdone */
9167 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9168 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9169 }
9170#undef GRP2_BODY_Eb_1
9171}
9172
9173
9174/* Need to use a body macro here since the EFLAGS behaviour differs between
9175 the shifts, rotates and rotate w/ carry. Sigh. */
9176#define GRP2_BODY_Ev_1(a_pImplExpr) \
9177 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9178 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9179 { \
9180 /* register */ \
9181 switch (pVCpu->iem.s.enmEffOpSize) \
9182 { \
9183 case IEMMODE_16BIT: \
9184 IEM_MC_BEGIN(3, 0, 0, 0); \
9185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9186 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9187 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9188 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9189 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9190 IEM_MC_REF_EFLAGS(pEFlags); \
9191 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9192 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9193 IEM_MC_END(); \
9194 break; \
9195 \
9196 case IEMMODE_32BIT: \
9197 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9198 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9199 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9200 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9201 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9202 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9203 IEM_MC_REF_EFLAGS(pEFlags); \
9204 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9205 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9206 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9207 IEM_MC_END(); \
9208 break; \
9209 \
9210 case IEMMODE_64BIT: \
9211 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9213 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9214 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9215 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9216 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9217 IEM_MC_REF_EFLAGS(pEFlags); \
9218 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9219 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9220 IEM_MC_END(); \
9221 break; \
9222 \
9223 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9224 } \
9225 } \
9226 else \
9227 { \
9228 /* memory */ \
9229 switch (pVCpu->iem.s.enmEffOpSize) \
9230 { \
9231 case IEMMODE_16BIT: \
9232 IEM_MC_BEGIN(3, 3, 0, 0); \
9233 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9234 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9235 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9236 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9237 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9238 \
9239 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9240 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9241 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9242 IEM_MC_FETCH_EFLAGS(EFlags); \
9243 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9244 \
9245 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9246 IEM_MC_COMMIT_EFLAGS(EFlags); \
9247 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9248 IEM_MC_END(); \
9249 break; \
9250 \
9251 case IEMMODE_32BIT: \
9252 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
9253 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9254 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9255 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9256 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9257 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9258 \
9259 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9260 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9261 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9262 IEM_MC_FETCH_EFLAGS(EFlags); \
9263 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9264 \
9265 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9266 IEM_MC_COMMIT_EFLAGS(EFlags); \
9267 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9268 IEM_MC_END(); \
9269 break; \
9270 \
9271 case IEMMODE_64BIT: \
9272 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
9273 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9274 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1); \
9275 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9277 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9278 \
9279 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9281 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9282 IEM_MC_FETCH_EFLAGS(EFlags); \
9283 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9284 \
9285 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9286 IEM_MC_COMMIT_EFLAGS(EFlags); \
9287 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9288 IEM_MC_END(); \
9289 break; \
9290 \
9291 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9292 } \
9293 } (void)0
9294
9295/**
9296 * @opmaps grp2_d1
9297 * @opcode /0
9298 * @opflclass rotate_1
9299 */
9300FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_1, uint8_t, bRm)
9301{
9302 IEMOP_MNEMONIC2(M1, ROL, rol, Ev, 1, DISOPTYPE_HARMLESS, 0);
9303 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9304}
9305
9306
9307/**
9308 * @opmaps grp2_d1
9309 * @opcode /1
9310 * @opflclass rotate_1
9311 */
9312FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_1, uint8_t, bRm)
9313{
9314 IEMOP_MNEMONIC2(M1, ROR, ror, Ev, 1, DISOPTYPE_HARMLESS, 0);
9315 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9316}
9317
9318
9319/**
9320 * @opmaps grp2_d1
9321 * @opcode /2
9322 * @opflclass rotate_carry_1
9323 */
9324FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_1, uint8_t, bRm)
9325{
9326 IEMOP_MNEMONIC2(M1, RCL, rcl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9327 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9328}
9329
9330
9331/**
9332 * @opmaps grp2_d1
9333 * @opcode /3
9334 * @opflclass rotate_carry_1
9335 */
9336FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_1, uint8_t, bRm)
9337{
9338 IEMOP_MNEMONIC2(M1, RCR, rcr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9339 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9340}
9341
9342
9343/**
9344 * @opmaps grp2_d1
9345 * @opcode /4
9346 * @opflclass shift_1
9347 */
9348FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_1, uint8_t, bRm)
9349{
9350 IEMOP_MNEMONIC2(M1, SHL, shl, Ev, 1, DISOPTYPE_HARMLESS, 0);
9351 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9352}
9353
9354
9355/**
9356 * @opmaps grp2_d1
9357 * @opcode /5
9358 * @opflclass shift_1
9359 */
9360FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_1, uint8_t, bRm)
9361{
9362 IEMOP_MNEMONIC2(M1, SHR, shr, Ev, 1, DISOPTYPE_HARMLESS, 0);
9363 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9364}
9365
9366
9367/**
9368 * @opmaps grp2_d1
9369 * @opcode /7
9370 * @opflclass shift_1
9371 */
9372FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_1, uint8_t, bRm)
9373{
9374 IEMOP_MNEMONIC2(M1, SAR, sar, Ev, 1, DISOPTYPE_HARMLESS, 0);
9375 GRP2_BODY_Ev_1(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9376}
9377
9378#undef GRP2_BODY_Ev_1
9379
9380/**
9381 * @opcode 0xd1
9382 */
9383FNIEMOP_DEF(iemOp_Grp2_Ev_1)
9384{
9385 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9386 switch (IEM_GET_MODRM_REG_8(bRm))
9387 {
9388 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_1, bRm);
9389 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_1, bRm);
9390 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_1, bRm);
9391 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_1, bRm);
9392 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_1, bRm);
9393 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_1, bRm);
9394 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_1, bRm);
9395 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9396 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9397 }
9398}
9399
9400
9401/**
9402 * @opcode 0xd2
9403 */
9404FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
9405{
9406 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9407
9408 /* Need to use a body macro here since the EFLAGS behaviour differs between
9409 the shifts, rotates and rotate w/ carry. Sigh. */
9410#define GRP2_BODY_Eb_CL(a_pImplExpr) \
9411 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9412 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9413 { \
9414 /* register */ \
9415 IEM_MC_BEGIN(3, 0, 0, 0); \
9416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9417 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9418 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9419 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9420 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9421 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9422 IEM_MC_REF_EFLAGS(pEFlags); \
9423 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9424 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9425 IEM_MC_END(); \
9426 } \
9427 else \
9428 { \
9429 /* memory */ \
9430 IEM_MC_BEGIN(3, 3, 0, 0); \
9431 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
9432 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9433 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9434 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9435 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9436 \
9437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9438 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9439 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9440 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9441 IEM_MC_FETCH_EFLAGS(EFlags); \
9442 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags); \
9443 \
9444 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9445 IEM_MC_COMMIT_EFLAGS(EFlags); \
9446 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9447 IEM_MC_END(); \
9448 } (void)0
9449
9450 switch (IEM_GET_MODRM_REG_8(bRm))
9451 {
9452 /**
9453 * @opdone
9454 * @opmaps grp2_d0
9455 * @opcode /0
9456 * @opflclass rotate_count
9457 */
9458 case 0:
9459 {
9460 IEMOP_MNEMONIC2EX(rol_Eb_CL, "rol Eb,CL", M_CL, ROL, rol, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9461 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9462 break;
9463 }
9464 /**
9465 * @opdone
9466 * @opmaps grp2_d0
9467 * @opcode /1
9468 * @opflclass rotate_count
9469 */
9470 case 1:
9471 {
9472 IEMOP_MNEMONIC2EX(ror_Eb_CL, "ror Eb,CL", M_CL, ROR, ror, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9473 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9474 break;
9475 }
9476 /**
9477 * @opdone
9478 * @opmaps grp2_d0
9479 * @opcode /2
9480 * @opflclass rotate_carry_count
9481 */
9482 case 2:
9483 {
9484 IEMOP_MNEMONIC2EX(rcl_Eb_CL, "rcl Eb,CL", M_CL, RCL, rcl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9485 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9486 break;
9487 }
9488 /**
9489 * @opdone
9490 * @opmaps grp2_d0
9491 * @opcode /3
9492 * @opflclass rotate_carry_count
9493 */
9494 case 3:
9495 {
9496 IEMOP_MNEMONIC2EX(rcr_Eb_CL, "rcr Eb,CL", M_CL, RCR, rcr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9497 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9498 break;
9499 }
9500 /**
9501 * @opdone
9502 * @opmaps grp2_d0
9503 * @opcode /4
9504 * @opflclass shift_count
9505 */
9506 case 4:
9507 {
9508 IEMOP_MNEMONIC2EX(shl_Eb_CL, "shl Eb,CL", M_CL, SHL, shl, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9509 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9510 break;
9511 }
9512 /**
9513 * @opdone
9514 * @opmaps grp2_d0
9515 * @opcode /5
9516 * @opflclass shift_count
9517 */
9518 case 5:
9519 {
9520 IEMOP_MNEMONIC2EX(shr_Eb_CL, "shr Eb,CL", M_CL, SHR, shr, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9521 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9522 break;
9523 }
9524 /**
9525 * @opdone
9526 * @opmaps grp2_d0
9527 * @opcode /7
9528 * @opflclass shift_count
9529 */
9530 case 7:
9531 {
9532 IEMOP_MNEMONIC2EX(sar_Eb_CL, "sar Eb,CL", M_CL, SAR, sar, Eb, REG_CL, DISOPTYPE_HARMLESS, 0);
9533 GRP2_BODY_Eb_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9534 break;
9535 }
9536 /** @opdone */
9537 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9538 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9539 }
9540#undef GRP2_BODY_Eb_CL
9541}
9542
9543
9544/* Need to use a body macro here since the EFLAGS behaviour differs between
9545 the shifts, rotates and rotate w/ carry. Sigh. */
9546#define GRP2_BODY_Ev_CL(a_pImplExpr) \
9547 PCIEMOPSHIFTSIZES const pImpl = (a_pImplExpr); \
9548 if (IEM_IS_MODRM_REG_MODE(bRm)) \
9549 { \
9550 /* register */ \
9551 switch (pVCpu->iem.s.enmEffOpSize) \
9552 { \
9553 case IEMMODE_16BIT: \
9554 IEM_MC_BEGIN(3, 0, 0, 0); \
9555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9556 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9557 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9558 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9559 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9560 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9561 IEM_MC_REF_EFLAGS(pEFlags); \
9562 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9563 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9564 IEM_MC_END(); \
9565 break; \
9566 \
9567 case IEMMODE_32BIT: \
9568 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
9569 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9570 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9571 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9572 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9573 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9574 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9575 IEM_MC_REF_EFLAGS(pEFlags); \
9576 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9577 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
9578 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9579 IEM_MC_END(); \
9580 break; \
9581 \
9582 case IEMMODE_64BIT: \
9583 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
9584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9585 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9586 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9587 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
9588 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9589 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
9590 IEM_MC_REF_EFLAGS(pEFlags); \
9591 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9592 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9593 IEM_MC_END(); \
9594 break; \
9595 \
9596 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9597 } \
9598 } \
9599 else \
9600 { \
9601 /* memory */ \
9602 switch (pVCpu->iem.s.enmEffOpSize) \
9603 { \
9604 case IEMMODE_16BIT: \
9605 IEM_MC_BEGIN(3, 3, 0, 0); \
9606 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
9607 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9608 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9609 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9610 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9611 \
9612 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9613 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9614 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9615 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9616 IEM_MC_FETCH_EFLAGS(EFlags); \
9617 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags); \
9618 \
9619 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9620 IEM_MC_COMMIT_EFLAGS(EFlags); \
9621 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9622 IEM_MC_END(); \
9623 break; \
9624 \
9625 case IEMMODE_32BIT: \
9626 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
9627 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
9628 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9629 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9630 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9631 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9632 \
9633 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9634 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9635 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9636 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9637 IEM_MC_FETCH_EFLAGS(EFlags); \
9638 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags); \
9639 \
9640 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9641 IEM_MC_COMMIT_EFLAGS(EFlags); \
9642 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9643 IEM_MC_END(); \
9644 break; \
9645 \
9646 case IEMMODE_64BIT: \
9647 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
9648 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
9649 IEM_MC_ARG(uint8_t, cShiftArg, 1); \
9650 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
9651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
9652 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
9653 \
9654 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
9655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
9656 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); \
9657 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
9658 IEM_MC_FETCH_EFLAGS(EFlags); \
9659 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags); \
9660 \
9661 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
9662 IEM_MC_COMMIT_EFLAGS(EFlags); \
9663 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
9664 IEM_MC_END(); \
9665 break; \
9666 \
9667 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
9668 } \
9669 } (void)0
9670
9671
9672/**
9673 * @opmaps grp2_d0
9674 * @opcode /0
9675 * @opflclass rotate_count
9676 */
9677FNIEMOP_DEF_1(iemOp_grp2_rol_Ev_CL, uint8_t, bRm)
9678{
9679 IEMOP_MNEMONIC2EX(rol_Ev_CL, "rol Ev,CL", M_CL, ROL, rol, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9680 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags));
9681}
9682
9683
9684/**
9685 * @opmaps grp2_d0
9686 * @opcode /1
9687 * @opflclass rotate_count
9688 */
9689FNIEMOP_DEF_1(iemOp_grp2_ror_Ev_CL, uint8_t, bRm)
9690{
9691 IEMOP_MNEMONIC2EX(ror_Ev_CL, "ror Ev,CL", M_CL, ROR, ror, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9692 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags));
9693}
9694
9695
9696/**
9697 * @opmaps grp2_d0
9698 * @opcode /2
9699 * @opflclass rotate_carry_count
9700 */
9701FNIEMOP_DEF_1(iemOp_grp2_rcl_Ev_CL, uint8_t, bRm)
9702{
9703 IEMOP_MNEMONIC2EX(rcl_Ev_CL, "rcl Ev,CL", M_CL, RCL, rcl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9704 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags));
9705}
9706
9707
9708/**
9709 * @opmaps grp2_d0
9710 * @opcode /3
9711 * @opflclass rotate_carry_count
9712 */
9713FNIEMOP_DEF_1(iemOp_grp2_rcr_Ev_CL, uint8_t, bRm)
9714{
9715 IEMOP_MNEMONIC2EX(rcr_Ev_CL, "rcr Ev,CL", M_CL, RCR, rcr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9716 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags));
9717}
9718
9719
9720/**
9721 * @opmaps grp2_d0
9722 * @opcode /4
9723 * @opflclass shift_count
9724 */
9725FNIEMOP_DEF_1(iemOp_grp2_shl_Ev_CL, uint8_t, bRm)
9726{
9727 IEMOP_MNEMONIC2EX(shl_Ev_CL, "shl Ev,CL", M_CL, SHL, shl, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9728 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags));
9729}
9730
9731
9732/**
9733 * @opmaps grp2_d0
9734 * @opcode /5
9735 * @opflclass shift_count
9736 */
9737FNIEMOP_DEF_1(iemOp_grp2_shr_Ev_CL, uint8_t, bRm)
9738{
9739 IEMOP_MNEMONIC2EX(shr_Ev_CL, "shr Ev,CL", M_CL, SHR, shr, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9740 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags));
9741}
9742
9743
9744/**
9745 * @opmaps grp2_d0
9746 * @opcode /7
9747 * @opflclass shift_count
9748 */
9749FNIEMOP_DEF_1(iemOp_grp2_sar_Ev_CL, uint8_t, bRm)
9750{
9751 IEMOP_MNEMONIC2EX(sar_Ev_CL, "sar Ev,CL", M_CL, SAR, sar, Ev, REG_CL, DISOPTYPE_HARMLESS, 0);
9752 GRP2_BODY_Ev_CL(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags));
9753}
9754
9755#undef GRP2_BODY_Ev_CL
9756
9757/**
9758 * @opcode 0xd3
9759 */
9760FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
9761{
9762 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9763 switch (IEM_GET_MODRM_REG_8(bRm))
9764 {
9765 case 0: return FNIEMOP_CALL_1(iemOp_grp2_rol_Ev_CL, bRm);
9766 case 1: return FNIEMOP_CALL_1(iemOp_grp2_ror_Ev_CL, bRm);
9767 case 2: return FNIEMOP_CALL_1(iemOp_grp2_rcl_Ev_CL, bRm);
9768 case 3: return FNIEMOP_CALL_1(iemOp_grp2_rcr_Ev_CL, bRm);
9769 case 4: return FNIEMOP_CALL_1(iemOp_grp2_shl_Ev_CL, bRm);
9770 case 5: return FNIEMOP_CALL_1(iemOp_grp2_shr_Ev_CL, bRm);
9771 case 7: return FNIEMOP_CALL_1(iemOp_grp2_sar_Ev_CL, bRm);
9772 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
9773 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
9774 }
9775}
9776
9777
9778/**
9779 * @opcode 0xd4
9780 * @opflmodify cf,pf,af,zf,sf,of
9781 * @opflundef cf,af,of
9782 */
9783FNIEMOP_DEF(iemOp_aam_Ib)
9784{
9785/** @todo testcase: aam */
9786 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
9787 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9789 IEMOP_HLP_NO_64BIT();
9790 if (!bImm)
9791 IEMOP_RAISE_DIVIDE_ERROR_RET();
9792 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
9793}
9794
9795
9796/**
9797 * @opcode 0xd5
9798 * @opflmodify cf,pf,af,zf,sf,of
9799 * @opflundef cf,af,of
9800 */
9801FNIEMOP_DEF(iemOp_aad_Ib)
9802{
9803/** @todo testcase: aad? */
9804 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
9805 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
9806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9807 IEMOP_HLP_NO_64BIT();
9808 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
9809}
9810
9811
9812/**
9813 * @opcode 0xd6
9814 */
9815FNIEMOP_DEF(iemOp_salc)
9816{
9817 IEMOP_MNEMONIC(salc, "salc");
9818 IEMOP_HLP_NO_64BIT();
9819
9820 IEM_MC_BEGIN(0, 0, 0, 0);
9821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9822 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
9823 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
9824 } IEM_MC_ELSE() {
9825 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
9826 } IEM_MC_ENDIF();
9827 IEM_MC_ADVANCE_RIP_AND_FINISH();
9828 IEM_MC_END();
9829}
9830
9831
9832/**
9833 * @opcode 0xd7
9834 */
9835FNIEMOP_DEF(iemOp_xlat)
9836{
9837 IEMOP_MNEMONIC(xlat, "xlat");
9838 switch (pVCpu->iem.s.enmEffAddrMode)
9839 {
9840 case IEMMODE_16BIT:
9841 IEM_MC_BEGIN(2, 0, 0, 0);
9842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9843 IEM_MC_LOCAL(uint8_t, u8Tmp);
9844 IEM_MC_LOCAL(uint16_t, u16Addr);
9845 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
9846 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
9847 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
9848 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9849 IEM_MC_ADVANCE_RIP_AND_FINISH();
9850 IEM_MC_END();
9851 break;
9852
9853 case IEMMODE_32BIT:
9854 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
9855 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9856 IEM_MC_LOCAL(uint8_t, u8Tmp);
9857 IEM_MC_LOCAL(uint32_t, u32Addr);
9858 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
9859 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
9860 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
9861 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9862 IEM_MC_ADVANCE_RIP_AND_FINISH();
9863 IEM_MC_END();
9864 break;
9865
9866 case IEMMODE_64BIT:
9867 IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
9868 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9869 IEM_MC_LOCAL(uint8_t, u8Tmp);
9870 IEM_MC_LOCAL(uint64_t, u64Addr);
9871 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
9872 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
9873 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
9874 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
9875 IEM_MC_ADVANCE_RIP_AND_FINISH();
9876 IEM_MC_END();
9877 break;
9878
9879 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9880 }
9881}
9882
9883
9884/**
9885 * Common worker for FPU instructions working on ST0 and STn, and storing the
9886 * result in ST0.
9887 *
9888 * @param bRm Mod R/M byte.
9889 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9890 */
9891FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9892{
9893 IEM_MC_BEGIN(3, 1, 0, 0);
9894 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9895 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9896 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9897 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9898 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9899
9900 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9901 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9902 IEM_MC_PREPARE_FPU_USAGE();
9903 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9904 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9905 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9906 } IEM_MC_ELSE() {
9907 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9908 } IEM_MC_ENDIF();
9909 IEM_MC_ADVANCE_RIP_AND_FINISH();
9910
9911 IEM_MC_END();
9912}
9913
9914
9915/**
9916 * Common worker for FPU instructions working on ST0 and STn, and only affecting
9917 * flags.
9918 *
9919 * @param bRm Mod R/M byte.
9920 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9921 */
9922FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9923{
9924 IEM_MC_BEGIN(3, 1, 0, 0);
9925 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9926 IEM_MC_LOCAL(uint16_t, u16Fsw);
9927 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9928 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9929 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9930
9931 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9932 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9933 IEM_MC_PREPARE_FPU_USAGE();
9934 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9935 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9936 IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9937 } IEM_MC_ELSE() {
9938 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9939 } IEM_MC_ENDIF();
9940 IEM_MC_ADVANCE_RIP_AND_FINISH();
9941
9942 IEM_MC_END();
9943}
9944
9945
9946/**
9947 * Common worker for FPU instructions working on ST0 and STn, only affecting
9948 * flags, and popping when done.
9949 *
9950 * @param bRm Mod R/M byte.
9951 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9952 */
9953FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
9954{
9955 IEM_MC_BEGIN(3, 1, 0, 0);
9956 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9957 IEM_MC_LOCAL(uint16_t, u16Fsw);
9958 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9959 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9960 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9961
9962 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9963 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9964 IEM_MC_PREPARE_FPU_USAGE();
9965 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
9966 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
9967 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
9968 } IEM_MC_ELSE() {
9969 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
9970 } IEM_MC_ENDIF();
9971 IEM_MC_ADVANCE_RIP_AND_FINISH();
9972
9973 IEM_MC_END();
9974}
9975
9976
/** Opcode 0xd8 11/0.  FADD st0,stN - delegates to the st0/stN store worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
9983
9984
/** Opcode 0xd8 11/1.  FMUL st0,stN - delegates to the st0/stN store worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
9991
9992
/** Opcode 0xd8 11/2.  FCOM st0,stN - compare, FSW only, no store/pop. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
9999
10000
/** Opcode 0xd8 11/3.  FCOMP st0,stN - same worker as FCOM but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
10007
10008
/** Opcode 0xd8 11/4.  FSUB st0,stN - delegates to the st0/stN store worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
10015
10016
/** Opcode 0xd8 11/5.  FSUBR st0,stN - reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
10023
10024
/** Opcode 0xd8 11/6.  FDIV st0,stN - delegates to the st0/stN store worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
10031
10032
/** Opcode 0xd8 11/7.  FDIVR st0,stN - reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
10039
10040
10041/**
10042 * Common worker for FPU instructions working on ST0 and an m32r, and storing
10043 * the result in ST0.
10044 *
10045 * @param bRm Mod R/M byte.
10046 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10047 */
10048FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
10049{
10050 IEM_MC_BEGIN(3, 3, 0, 0);
10051 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10052 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10053 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
10054 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10055 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10056 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
10057
10058 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10060
10061 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10062 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10063 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10064
10065 IEM_MC_PREPARE_FPU_USAGE();
10066 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10067 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
10068 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10069 } IEM_MC_ELSE() {
10070 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10071 } IEM_MC_ENDIF();
10072 IEM_MC_ADVANCE_RIP_AND_FINISH();
10073
10074 IEM_MC_END();
10075}
10076
10077
/** Opcode 0xd8 !11/0.  FADD st0,m32real - delegates to the st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
10084
10085
/** Opcode 0xd8 !11/1.  FMUL st0,m32real - delegates to the st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
10092
10093
/** Opcode 0xd8 !11/2.  FCOM st0,m32real - compare against a 32-bit real from
 *  memory; updates FSW only (open-coded since the memory operand address is
 *  recorded with the FSW update). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10125
10126
/** Opcode 0xd8 !11/3.  FCOMP st0,m32real - like FCOM m32r but pops ST0 after
 *  updating FSW. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,   r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10158
10159
/** Opcode 0xd8 !11/4.  FSUB st0,m32real - delegates to the st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
10166
10167
/** Opcode 0xd8 !11/5.  FSUBR st0,m32real - reversed-operand subtract. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
10174
10175
/** Opcode 0xd8 !11/6.  FDIV st0,m32real - delegates to the st0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
10182
10183
/** Opcode 0xd8 !11/7.  FDIVR st0,m32real - reversed-operand divide. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
10190
10191
10192/**
10193 * @opcode 0xd8
10194 */
10195FNIEMOP_DEF(iemOp_EscF0)
10196{
10197 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10198 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
10199
10200 if (IEM_IS_MODRM_REG_MODE(bRm))
10201 {
10202 switch (IEM_GET_MODRM_REG_8(bRm))
10203 {
10204 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
10205 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
10206 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
10207 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
10208 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
10209 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
10210 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
10211 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
10212 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10213 }
10214 }
10215 else
10216 {
10217 switch (IEM_GET_MODRM_REG_8(bRm))
10218 {
10219 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
10220 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
10221 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
10222 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
10223 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
10224 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
10225 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
10226 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
10227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10228 }
10229 }
10230}
10231
10232
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - convert the 32-bit real to 80-bit and push it; a full stack
 * (ST7 occupied) takes the push-overflow path instead.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val,    r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) { /* register 7 must be free for the push */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10263
10264
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - store ST0 to memory as a 32-bit real.  On stack underflow,
 * a negative QNaN is written if the invalid-op exception is masked (FCW.IM),
 * otherwise the mapping is rolled back without writing. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the store worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10302
10303
/** Opcode 0xd9 !11/3
 * FSTP m32real - like FST m32real but pops ST0 afterwards (both the normal
 * FSW update and the underflow path use the THEN_POP variants). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() { /* masked invalid-op: write default QNaN */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10341
10342
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment; the layout (14 vs 28 bytes)
 * depends on the effective operand size, which is passed to the C worker. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,      1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
                        iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
10361
10362
10363/** Opcode 0xd9 !11/5 */
10364FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
10365{
10366 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
10367 IEM_MC_BEGIN(1, 1, 0, 0);
10368 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10369 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10370
10371 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10372 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10373 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
10374
10375 IEM_MC_ARG(uint16_t, u16Fsw, 0);
10376 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10377
10378 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw),
10379 iemCImpl_fldcw, u16Fsw);
10380 IEM_MC_END();
10381}
10382
10383
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - store the FPU environment without checking pending
 * exceptions (no IEM_MC_MAYBE_RAISE_FPU_XCPT, unlike the wait-prefixed form). */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,      1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
10402
10403
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the current FPU control word to memory; no exception
 * check, implemented inline with a plain 16-bit memory store. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10420
10421
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - no arithmetic, but still checks CR0.EM/TS and pending FPU
 * exceptions, and updates the FPU opcode/IP bookkeeping. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
10437
10438
/** Opcode 0xd9 11/0 stN
 * FLD stN - push a copy of STn onto the stack; empty source takes the
 * push-underflow path. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10463
10464
/** Opcode 0xd9 11/3 stN
 * FXCH stN - exchange ST0 and STn.  The non-empty case is done inline
 * (swap via FpuRes, clearing C1); the underflow case is deferred to a C
 * implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t,          uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10493
10494
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP stN - copy ST0 to STn then pop.  The stN==st0 case is special-cased
 * since it is frequently used as an official 'ffreep st0' sequence: it just
 * pops (or underflows) without the copy. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode); /* just pop */
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
10541
10542
10543/**
10544 * Common worker for FPU instructions working on ST0 and replaces it with the
10545 * result, i.e. unary operators.
10546 *
10547 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10548 */
10549FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
10550{
10551 IEM_MC_BEGIN(2, 1, 0, 0);
10552 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10553 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10554 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10555 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10556
10557 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10558 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10559 IEM_MC_PREPARE_FPU_USAGE();
10560 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10561 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
10562 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10563 } IEM_MC_ELSE() {
10564 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10565 } IEM_MC_ENDIF();
10566 IEM_MC_ADVANCE_RIP_AND_FINISH();
10567
10568 IEM_MC_END();
10569}
10570
10571
/** Opcode 0xd9 0xe0.  FCHS - flip the sign of ST0 via the unary-op worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
10578
10579
/** Opcode 0xd9 0xe1.  FABS - absolute value of ST0 via the unary-op worker. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
10586
10587
/** Opcode 0xd9 0xe4.
 * FTST - compare ST0 against 0.0; only FSW condition codes are updated,
 * with the stack-underflow path on an empty ST0. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10611
10612
/** Opcode 0xd9 0xe5.
 * FXAM - classify ST0 into FSW C0-C3.  Unlike FTST there is no underflow
 * branch: the register is referenced unconditionally (FXAM classifies empty
 * registers too), so IEM_MC_REF_FPUREG is used instead of the not-empty test. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10633
10634
10635/**
10636 * Common worker for FPU instructions pushing a constant onto the FPU stack.
10637 *
10638 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10639 */
10640FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
10641{
10642 IEM_MC_BEGIN(1, 1, 0, 0);
10643 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10644 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10645 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10646
10647 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10648 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10649 IEM_MC_PREPARE_FPU_USAGE();
10650 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10651 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10652 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10653 } IEM_MC_ELSE() {
10654 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
10655 } IEM_MC_ENDIF();
10656 IEM_MC_ADVANCE_RIP_AND_FINISH();
10657
10658 IEM_MC_END();
10659}
10660
10661
/** Opcode 0xd9 0xe8. FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10714
10715
/** Opcode 0xd9 0xf0. F2XM1 - compute 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10729
10730
10731/**
10732 * Common worker for FPU instructions working on STn and ST0, storing the result
10733 * in STn, and popping the stack unless IE, DE or ZE was raised.
10734 *
10735 * @param bRm Mod R/M byte.
10736 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10737 */
10738FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10739{
10740 IEM_MC_BEGIN(3, 1, 0, 0);
10741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10742 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10743 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10744 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10745 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10746
10747 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10748 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10749
10750 IEM_MC_PREPARE_FPU_USAGE();
10751 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10752 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10753 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10754 } IEM_MC_ELSE() {
10755 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10756 } IEM_MC_ENDIF();
10757 IEM_MC_ADVANCE_RIP_AND_FINISH();
10758
10759 IEM_MC_END();
10760}
10761
10762
/** Opcode 0xd9 0xf1. FYL2X - st1 = st1 * log2(st0), then pop.
 * NOTE(review): the stats name says 'fyl2x_st0' while the operand string is
 * "st1,st0" — looks like a copy/paste leftover in the stats name; harmless. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10769
10770
10771/**
10772 * Common worker for FPU instructions working on ST0 and having two outputs, one
10773 * replacing ST0 and one pushed onto the stack.
10774 *
10775 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10776 */
10777FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
10778{
10779 IEM_MC_BEGIN(2, 1, 0, 0);
10780 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10781 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
10782 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
10783 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10784
10785 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10786 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10787 IEM_MC_PREPARE_FPU_USAGE();
10788 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10789 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
10790 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
10791 } IEM_MC_ELSE() {
10792 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
10793 } IEM_MC_ENDIF();
10794 IEM_MC_ADVANCE_RIP_AND_FINISH();
10795
10796 IEM_MC_END();
10797}
10798
10799
/** Opcode 0xd9 0xf2. FPTAN - partial tangent: replaces ST(0) and pushes a
 * second result (see the replace-and-push worker above). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN - partial arctangent of ST1/ST0, result to ST1, pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT - split ST(0) into exponent and significand,
 * replacing ST(0) and pushing the second result. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1 - IEEE partial remainder of ST0 by ST1, result in ST0. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10830
10831
/** Opcode 0xd9 0xf6. FDECSTP - decrement the FPU stack top pointer (no data moved). */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    /* FSW update with 0: clears C0/C2/C3 per the note above. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10852
10853
/** Opcode 0xd9 0xf7. FINCSTP - increment the FPU stack top pointer (no data moved). */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* FSW update with 0: clears C0/C2/C3 per the note above. */
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10874
10875
/** Opcode 0xd9 0xf8. FPREM - partial remainder of ST0 by ST1 (truncating), result in ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}


/** Opcode 0xd9 0xf9. FYL2XP1 - st1 = st1 * log2(st0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}


/** Opcode 0xd9 0xfa. FSQRT - square root of ST(0). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}


/** Opcode 0xd9 0xfb. FSINCOS - replaces ST(0) with sin and pushes cos (two results). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}


/** Opcode 0xd9 0xfc. FRNDINT - round ST(0) to integer per the rounding control. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}


/** Opcode 0xd9 0xfd. FSCALE - scale ST(0) by ST(1), result in ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}


/** Opcode 0xd9 0xfe. FSIN - sine of ST(0). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}


/** Opcode 0xd9 0xff. FCOS - cosine of ST(0). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10938
10939
/** Used by iemOp_EscF1.
 * Dispatch table for the register-form 0xd9 opcodes 0xe0..0xff; the index is
 * the ModR/M byte minus 0xe0 (see the bRm - 0xe0 lookup in iemOp_EscF1). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
10976
10977
10978/**
10979 * @opcode 0xd9
10980 */
10981FNIEMOP_DEF(iemOp_EscF1)
10982{
10983 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10984 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
10985
10986 if (IEM_IS_MODRM_REG_MODE(bRm))
10987 {
10988 switch (IEM_GET_MODRM_REG_8(bRm))
10989 {
10990 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
10991 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
10992 case 2:
10993 if (bRm == 0xd0)
10994 return FNIEMOP_CALL(iemOp_fnop);
10995 IEMOP_RAISE_INVALID_OPCODE_RET();
10996 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
10997 case 4:
10998 case 5:
10999 case 6:
11000 case 7:
11001 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
11002 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
11003 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11004 }
11005 }
11006 else
11007 {
11008 switch (IEM_GET_MODRM_REG_8(bRm))
11009 {
11010 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
11011 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
11012 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
11013 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
11014 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
11015 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
11016 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
11017 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
11018 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11019 }
11020 }
11021}
11022
11023
/** Opcode 0xda 11/0. FCMOVB - copy ST(i) to ST(0) if CF is set (below).
 * FOP/FPUIP are updated even when the condition is false; if either register
 * is empty, the stack-underflow path is taken instead. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11048
11049
/** Opcode 0xda 11/1. FCMOVE - copy ST(i) to ST(0) if ZF is set (equal).
 * Same structure as FCMOVB above, just keying on ZF. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11074
11075
/** Opcode 0xda 11/2. FCMOVBE - copy ST(i) to ST(0) if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11100
11101
/** Opcode 0xda 11/3. FCMOVU - copy ST(i) to ST(0) if PF is set (unordered). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11126
11127
11128/**
11129 * Common worker for FPU instructions working on ST0 and ST1, only affecting
11130 * flags, and popping twice when done.
11131 *
11132 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11133 */
11134FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
11135{
11136 IEM_MC_BEGIN(3, 1, 0, 0);
11137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11138 IEM_MC_LOCAL(uint16_t, u16Fsw);
11139 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
11140 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11141 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11142
11143 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11144 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11145
11146 IEM_MC_PREPARE_FPU_USAGE();
11147 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
11148 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
11149 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
11150 } IEM_MC_ELSE() {
11151 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
11152 } IEM_MC_ENDIF();
11153 IEM_MC_ADVANCE_RIP_AND_FINISH();
11154
11155 IEM_MC_END();
11156}
11157
11158
/** Opcode 0xda 0xe9. FUCOMPP - unordered compare ST0 with ST1, pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
11165
11166
11167/**
11168 * Common worker for FPU instructions working on ST0 and an m32i, and storing
11169 * the result in ST0.
11170 *
11171 * @param bRm Mod R/M byte.
11172 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11173 */
11174FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
11175{
11176 IEM_MC_BEGIN(3, 3, 0, 0);
11177 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11178 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11179 IEM_MC_LOCAL(int32_t, i32Val2);
11180 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11181 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11182 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
11183
11184 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11186
11187 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11188 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11189 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11190
11191 IEM_MC_PREPARE_FPU_USAGE();
11192 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11193 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
11194 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11195 } IEM_MC_ELSE() {
11196 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11197 } IEM_MC_ENDIF();
11198 IEM_MC_ADVANCE_RIP_AND_FINISH();
11199
11200 IEM_MC_END();
11201}
11202
11203
/** Opcode 0xda !11/0. FIADD m32i - add a 32-bit integer to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}


/** Opcode 0xda !11/1. FIMUL m32i - multiply ST(0) by a 32-bit integer. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
11218
11219
/** Opcode 0xda !11/2. FICOM m32i - compare ST(0) with a 32-bit integer,
 * setting only FSW flags (no pop; see FICOMP below for the popping variant). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* _WITH_MEM_OP: also records FPUDP/FPUDS for the memory operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11251
11252
/** Opcode 0xda !11/3. FICOMP m32i - compare ST(0) with a 32-bit integer,
 * setting only FSW flags, then pop.  Identical to FICOM above except for the
 * _THEN_POP FSW/underflow updaters. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11284
11285
/** Opcode 0xda !11/4. FISUB m32i - subtract a 32-bit integer from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}


/** Opcode 0xda !11/5. FISUBR m32i - reversed subtract: m32i minus ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}


/** Opcode 0xda !11/6. FIDIV m32i - divide ST(0) by a 32-bit integer. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}


/** Opcode 0xda !11/7. FIDIVR m32i - reversed divide: m32i divided by ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
11316
11317
11318/**
11319 * @opcode 0xda
11320 */
11321FNIEMOP_DEF(iemOp_EscF2)
11322{
11323 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11324 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
11325 if (IEM_IS_MODRM_REG_MODE(bRm))
11326 {
11327 switch (IEM_GET_MODRM_REG_8(bRm))
11328 {
11329 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
11330 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
11331 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
11332 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
11333 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11334 case 5:
11335 if (bRm == 0xe9)
11336 return FNIEMOP_CALL(iemOp_fucompp);
11337 IEMOP_RAISE_INVALID_OPCODE_RET();
11338 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11339 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11340 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11341 }
11342 }
11343 else
11344 {
11345 switch (IEM_GET_MODRM_REG_8(bRm))
11346 {
11347 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
11348 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
11349 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
11350 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
11351 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
11352 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
11353 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
11354 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
11355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11356 }
11357 }
11358}
11359
11360
/** Opcode 0xdb !11/0. FILD m32i - load a 32-bit integer and push it onto the
 * FPU stack.  The push requires the incoming top slot (ST(7) relative to the
 * current top) to be empty; otherwise the push-overflow path is taken. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11391
11392
/** Opcode 0xdb !11/1. FISTTP m32i - store ST(0) to memory as a 32-bit integer
 * with truncation (iemAImpl_fistt_*), then pop.
 *
 * The destination is mapped for writing before the empty check so that the
 * masked-underflow path (FCW.IM set) can still store the integer-indefinite
 * value; with IM clear the mapping is rolled back without writing. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11430
11431
/** Opcode 0xdb !11/2. FIST m32i - store ST(0) to memory as a 32-bit integer
 * (rounded per FCW), no pop.  Same masked-underflow handling as FISTTP above:
 * with FCW.IM set an empty ST(0) stores the integer-indefinite value. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11469
11470
/** Opcode 0xdb !11/3. FISTP m32i - store ST(0) to memory as a 32-bit integer
 * (rounded per FCW), then pop.  Identical to FIST above apart from the
 * _THEN_POP FSW/underflow updaters. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11508
11509
/** Opcode 0xdb !11/5. FLD m80r - load an 80-bit real from memory and push it
 * onto the FPU stack (push requires the incoming top slot to be empty). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11540
11541
/** Opcode 0xdb !11/7. FSTP m80r - store ST(0) to memory as an 80-bit real,
 * then pop.  On an empty ST(0) with FCW.IM set, a negative QNaN is stored
 * instead; with IM clear the mapped destination is rolled back unwritten. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11579
11580
/** Opcode 0xdb 11/0.
 * FCMOVNB ST(0),ST(i) - copy ST(i) into ST(0) when CF is clear.  The FPU
 * opcode/IP are updated whether or not the move is taken; underflow is
 * reported if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11605
11606
/** Opcode 0xdb 11/1.
 * FCMOVNE ST(0),ST(i) - copy ST(i) into ST(0) when ZF is clear.
 * Same structure as iemOp_fcmovnb_stN, only the EFLAGS bit differs. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11631
11632
/** Opcode 0xdb 11/2.
 * FCMOVNBE ST(0),ST(i) - copy ST(i) into ST(0) when both CF and ZF are
 * clear (i.e. "not below or equal"). */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11657
11658
/** Opcode 0xdb 11/3.
 * FCMOVNU ST(0),ST(i) - copy ST(i) into ST(0) when PF is clear
 * ("not unordered"). */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11683
11684
/** Opcode 0xdb 0xe0.
 * FNENI - 8087 "enable interrupts"; ignored (no-op) on later FPUs, so only
 * the device-not-available check and RIP advance are performed. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11695
11696
/** Opcode 0xdb 0xe1.
 * FNDISI - 8087 "disable interrupts"; ignored (no-op) on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11707
11708
/** Opcode 0xdb 0xe2.
 * FNCLEX - clear the FPU exception bits in FSW without checking for
 * pending exceptions first (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11721
11722
/** Opcode 0xdb 0xe3.
 * FNINIT - reinitialize the FPU; deferred to the C implementation with
 * fCheckXcpts=false (no-wait form), flagging FCW/FSW as modified for the
 * native recompiler. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                                iemCImpl_finit, false /*fCheckXcpts*/);
}
11731
11732
/** Opcode 0xdb 0xe4.
 * FNSETPM - 80287 "set protected mode"; ignored (no-op) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11743
11744
/** Opcode 0xdb 0xe5.
 * FRSTPM - 80287XL "reset protected mode".  Newer CPUs raise \#UD, which is
 * the behaviour implemented here; the disabled branch shows the old ignore
 * behaviour. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11760
11761
/** Opcode 0xdb 11/5.
 * FUCOMI ST(0),ST(i) - unordered compare setting EFLAGS; deferred to
 * iemCImpl_fcomi_fucomi with fUCmp=true and no pop (the third argument
 * packs the fPop flag together with the FPU opcode word). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11770
11771
11772/** Opcode 0xdb 11/6. */
11773FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11774{
11775 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11776 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11777 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11778 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11779}
11780
11781
/**
 * @opcode 0xdb
 *
 * FPU escape 0xdb decoder: reads the ModR/M byte, records the 11-bit FPU
 * opcode (low 3 bits of 0xdb + ModR/M) for FOP reporting, then dispatches
 * on the reg field - register forms (FCMOVcc, FNCLEX, FNINIT, FUCOMI,
 * FCOMI and 8087/287 relics) vs memory forms (m32i loads/stores, m80r).
 */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* reg=4 in register mode covers bRm 0xe0..0xe7 only. */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    case 0xe7:  IEMOP_RAISE_INVALID_OPCODE_RET();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN,  bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11833
11834
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * Both operands must be non-empty; otherwise a stack underflow is reported
 * against ST(i) and the assembly helper is not invoked.
 *
 * @param   bRm         Mod R/M byte (the R/M field selects ST(i)).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly),
 *                      taking (result, ST(i), ST(0)).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11865
11866
/** Opcode 0xdc 11/0.
 * FADD ST(i),ST(0) - adds ST(0) to ST(i), result in ST(i); delegates to
 * the common stN,st0 worker with the 80-bit add helper. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11873
11874
/** Opcode 0xdc 11/1.
 * FMUL ST(i),ST(0) - multiplies ST(i) by ST(0), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11881
11882
/** Opcode 0xdc 11/4.
 * FSUBR ST(i),ST(0) - reverse subtract, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11889
11890
/** Opcode 0xdc 11/5.
 * FSUB ST(i),ST(0) - subtract, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11897
11898
/** Opcode 0xdc 11/6.
 * FDIVR ST(i),ST(0) - reverse divide, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11905
11906
/** Opcode 0xdc 11/7.
 * FDIV ST(i),ST(0) - divide, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11913
11914
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * The memory operand is fetched unconditionally; the assembly helper only
 * runs when ST(0) is non-empty, otherwise a stack underflow with memory
 * operand info is reported.
 *
 * @param   bRm         Mod R/M byte (memory form).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly),
 *                      taking (result, ST(0), m64 value).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11949
11950
/** Opcode 0xdc !11/0.
 * FADD m64r - adds a 64-bit real memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11957
11958
/** Opcode 0xdc !11/1.
 * FMUL m64r - multiplies ST(0) by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11965
11966
/** Opcode 0xdc !11/2.
 * FCOM m64r - compares ST(0) with a 64-bit real memory operand; only FSW is
 * updated, no stack register is written and nothing is popped. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11998
11999
/** Opcode 0xdc !11/3.
 * FCOMP m64r - same compare as FCOM m64r but pops ST(0) afterwards (the
 * _THEN_POP macro variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12031
12032
/** Opcode 0xdc !11/4.
 * FSUB m64r - subtracts a 64-bit real memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
12039
12040
/** Opcode 0xdc !11/5.
 * FSUBR m64r - reverse subtract: m64 operand minus ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
12047
12048
/** Opcode 0xdc !11/6.
 * FDIV m64r - divides ST(0) by a 64-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
12055
12056
/** Opcode 0xdc !11/7.
 * FDIVR m64r - reverse divide: m64 operand divided by ST(0), result in ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
12063
12064
/**
 * @opcode 0xdc
 *
 * FPU escape 0xdc decoder: register forms are the arithmetic ops with
 * ST(i) as destination; memory forms take a 64-bit real operand with
 * ST(0) as destination.  The FPU opcode word is recorded first.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12103
12104
/** Opcode 0xdd !11/0.
 * FLD m64r - load a 64-bit real from memory, convert to 80-bit and push.
 * Push overflow is reported when relative register 7 (the slot that would
 * become the new ST(0)) is occupied.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12135
12136
/** Opcode 0xdd !11/1.
 * FISTTP m64i - store ST(0) to memory as a 64-bit integer using truncation,
 * then pop.  On stack underflow with IM masked the 64-bit integer
 * indefinite (INT64_MIN) is stored instead; unmasked leaves memory
 * untouched (mapping rolled back). */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12174
12175
/** Opcode 0xdd !11/2.
 * FST m64r - store ST(0) to memory as a 64-bit real without popping.
 * On stack underflow with IM masked a negative QNaN is stored; unmasked
 * rolls the mapping back. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12213
12214
12215
12216
/** Opcode 0xdd !11/3.
 * FSTP m64r - store ST(0) to memory as a 64-bit real, then pop.  Same as
 * FST m64r except the FSW update / underflow reporting also pops. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12254
12255
/** Opcode 0xdd !11/4.
 * FRSTOR m94/108byte - restore the whole FPU state from memory; deferred to
 * the C implementation (layout depends on the effective operand size). */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
12274
12275
/** Opcode 0xdd !11/6.
 * FNSAVE m94/108byte - save the whole FPU state to memory and then
 * reinitialize the FPU (implicit FNINIT); deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, RT_BIT_64(kIemNativeGstReg_FpuFcw) | RT_BIT_64(kIemNativeGstReg_FpuFsw),
                        iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
12294
/** Opcode 0xdd !11/7.
 * FNSTSW m16 - store the FPU status word to a 16-bit memory location
 * (no-wait form; only reads the FPU state, no exception check). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
12318
12319
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - tag the given register as empty without touching its value
 * or the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave the
             unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12339
12340
/** Opcode 0xdd 11/2.
 * FST ST(i) - copy ST(0) into ST(i) (FSW top/flags untouched, 0 passed as
 * the result FSW); stack underflow is reported against ST(i) when ST(0)
 * is empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12363
12364
/** Opcode 0xdd 11/4.
 * FUCOM ST(0),ST(i) - unordered compare, FSW only, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
12371
12372
/** Opcode 0xdd 11/5.
 * FUCOMP ST(0),ST(i) - unordered compare, FSW only, then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
12379
12380
/**
 * @opcode 0xdd
 *
 * FPU escape 0xdd decoder: register forms are FFREE/FST/FSTP/FUCOM(P);
 * memory forms are 64-bit real loads/stores, FISTTP m64i, and the state
 * save/restore + status word instructions.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12419
12420
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0) - add and pop, result in ST(i). */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
12427
12428
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0) - multiply and pop, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
12435
12436
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1) and pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
12443
12444
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0) - reverse subtract and pop, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
12451
12452
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0) - subtract and pop, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
12459
12460
/** Opcode 0xde 11/6 - FDIVRP ST(i),ST(0): ST(i) = ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
12467
12468
/** Opcode 0xde 11/7 - FDIVP ST(i),ST(0): ST(i) = ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
12475
12476
12477/**
12478 * Common worker for FPU instructions working on ST0 and an m16i, and storing
12479 * the result in ST0.
12480 *
12481 * @param bRm Mod R/M byte.
12482 * @param pfnAImpl Pointer to the instruction implementation (assembly).
12483 */
12484FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
12485{
12486 IEM_MC_BEGIN(3, 3, 0, 0);
12487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
12488 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
12489 IEM_MC_LOCAL(int16_t, i16Val2);
12490 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
12491 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
12492 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
12493
12494 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
12495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
12496
12497 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
12498 IEM_MC_MAYBE_RAISE_FPU_XCPT();
12499 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
12500
12501 IEM_MC_PREPARE_FPU_USAGE();
12502 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
12503 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
12504 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
12505 } IEM_MC_ELSE() {
12506 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
12507 } IEM_MC_ENDIF();
12508 IEM_MC_ADVANCE_RIP_AND_FINISH();
12509
12510 IEM_MC_END();
12511}
12512
12513
/** Opcode 0xde !11/0 - FIADD m16i: ST(0) += (signed 16-bit integer at mem). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
12520
12521
/** Opcode 0xde !11/1 - FIMUL m16i: ST(0) *= (signed 16-bit integer at mem). */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
12528
12529
/** Opcode 0xde !11/2 - FICOM m16i: compare ST(0) against a signed 16-bit
 * integer in memory, updating the FSW condition codes (no store, no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        /* Worker returns the new FSW; commit it together with DS:addr FPU data ptr. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: underflow, no register to pop (UINT8_MAX). */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12561
12562
/** Opcode 0xde !11/3 - FICOMP m16i: like FICOM m16i but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        /* Same FSW commit as FICOM, but with the stack pop variant. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12594
12595
/** Opcode 0xde !11/4 - FISUB m16i: ST(0) -= (signed 16-bit integer at mem). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
12602
12603
/** Opcode 0xde !11/5 - FISUBR m16i: ST(0) = (signed 16-bit integer at mem) - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12610
12611
/** Opcode 0xde !11/6 - FIDIV m16i: ST(0) /= (signed 16-bit integer at mem). */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12618
12619
/** Opcode 0xde !11/7 - FIDIVR m16i: ST(0) = (signed 16-bit integer at mem) / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12626
12627
12628/**
12629 * @opcode 0xde
12630 */
12631FNIEMOP_DEF(iemOp_EscF6)
12632{
12633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12634 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
12635 if (IEM_IS_MODRM_REG_MODE(bRm))
12636 {
12637 switch (IEM_GET_MODRM_REG_8(bRm))
12638 {
12639 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
12640 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
12641 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
12642 case 3: if (bRm == 0xd9)
12643 return FNIEMOP_CALL(iemOp_fcompp);
12644 IEMOP_RAISE_INVALID_OPCODE_RET();
12645 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
12646 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
12647 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
12648 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
12649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12650 }
12651 }
12652 else
12653 {
12654 switch (IEM_GET_MODRM_REG_8(bRm))
12655 {
12656 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
12657 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
12658 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
12659 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
12660 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
12661 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
12662 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
12663 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
12664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12665 }
12666 }
12667}
12668
12669
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE + FINCSTP: mark ST(i)
 * empty, then increment the stack top pointer. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));  /* tag ST(i) as empty */
    IEM_MC_FPU_STACK_INC_TOP();                      /* the "pop" part */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12689
12690
/** Opcode 0xdf 0xe0 - FNSTSW AX: store the FPU status word in AX (no
 * pending-exception check, hence the N variant). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12705
12706
12707/** Opcode 0xdf 11/5. */
12708FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12709{
12710 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12711 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12712 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12713 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12714}
12715
12716
/** Opcode 0xdf 11/6 - FCOMIP ST(0),ST(i): ordered compare into EFLAGS
 * (fUCmp=false), then pop ST(0) (bit 31 of the combined fPop/opcode arg). */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12725
12726
/** Opcode 0xdf !11/0 - FILD m16i: convert a signed 16-bit integer from memory
 * to 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs the register below the current top (ST(7)) to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12757
12758
/** Opcode 0xdf !11/1 - FISTTP m16i: store ST(0) to memory as a signed 16-bit
 * integer using truncation (chop) rounding, then pop.  (SSE3 instruction.) */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before we know whether ST0 is empty. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        /* Commit is conditional on the FSW the worker produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer indefinite; else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12796
12797
/** Opcode 0xdf !11/2 - FIST m16i: store ST(0) to memory as a signed 16-bit
 * integer using the current FCW rounding mode; no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        /* Non-pop FSW update distinguishes this from FISTP below. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12835
12836
/** Opcode 0xdf !11/3 - FISTP m16i: store ST(0) to memory as a signed 16-bit
 * integer using the current FCW rounding mode, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer indefinite; else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12874
12875
/** Opcode 0xdf !11/4 - FBLD m80bcd: load an 80-bit packed BCD value from
 * memory, convert it to 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Push requires ST(7) (the slot below top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12906
12907
/** Opcode 0xdf !11/5 - FILD m64i: convert a signed 64-bit integer from memory
 * to 80-bit real and push it onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12938
12939
/** Opcode 0xdf !11/6 - FBSTP m80bcd: store ST(0) to memory as an 80-bit
 * packed BCD value, then pop. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the BCD indefinite; else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12977
12978
/** Opcode 0xdf !11/7 - FISTP m64i: store ST(0) to memory as a signed 64-bit
 * integer using the current FCW rounding mode, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* ST0 empty: with IM masked, store the integer indefinite; else roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
13016
13017
13018/**
13019 * @opcode 0xdf
13020 */
13021FNIEMOP_DEF(iemOp_EscF7)
13022{
13023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
13024 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
13025 if (IEM_IS_MODRM_REG_MODE(bRm))
13026 {
13027 switch (IEM_GET_MODRM_REG_8(bRm))
13028 {
13029 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
13030 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
13031 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13032 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
13033 case 4: if (bRm == 0xe0)
13034 return FNIEMOP_CALL(iemOp_fnstsw_ax);
13035 IEMOP_RAISE_INVALID_OPCODE_RET();
13036 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
13037 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
13038 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
13039 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13040 }
13041 }
13042 else
13043 {
13044 switch (IEM_GET_MODRM_REG_8(bRm))
13045 {
13046 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
13047 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
13048 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
13049 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
13050 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
13051 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
13052 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
13053 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
13054 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13055 }
13056 }
13057}
13058
13059
13060/**
13061 * @opcode 0xe0
13062 * @opfltest zf
13063 */
13064FNIEMOP_DEF(iemOp_loopne_Jb)
13065{
13066 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
13067 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13068 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13069
13070 switch (pVCpu->iem.s.enmEffAddrMode)
13071 {
13072 case IEMMODE_16BIT:
13073 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13075 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13076 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13077 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13078 } IEM_MC_ELSE() {
13079 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13080 IEM_MC_ADVANCE_RIP_AND_FINISH();
13081 } IEM_MC_ENDIF();
13082 IEM_MC_END();
13083 break;
13084
13085 case IEMMODE_32BIT:
13086 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13088 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13089 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13090 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13091 } IEM_MC_ELSE() {
13092 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13093 IEM_MC_ADVANCE_RIP_AND_FINISH();
13094 } IEM_MC_ENDIF();
13095 IEM_MC_END();
13096 break;
13097
13098 case IEMMODE_64BIT:
13099 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13100 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13101 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
13102 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13103 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13104 } IEM_MC_ELSE() {
13105 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13106 IEM_MC_ADVANCE_RIP_AND_FINISH();
13107 } IEM_MC_ENDIF();
13108 IEM_MC_END();
13109 break;
13110
13111 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13112 }
13113}
13114
13115
13116/**
13117 * @opcode 0xe1
13118 * @opfltest zf
13119 */
13120FNIEMOP_DEF(iemOp_loope_Jb)
13121{
13122 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
13123 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13124 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13125
13126 switch (pVCpu->iem.s.enmEffAddrMode)
13127 {
13128 case IEMMODE_16BIT:
13129 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13130 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13131 IEM_MC_IF_CX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13132 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13133 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13134 } IEM_MC_ELSE() {
13135 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13136 IEM_MC_ADVANCE_RIP_AND_FINISH();
13137 } IEM_MC_ENDIF();
13138 IEM_MC_END();
13139 break;
13140
13141 case IEMMODE_32BIT:
13142 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13144 IEM_MC_IF_ECX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13145 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13146 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13147 } IEM_MC_ELSE() {
13148 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13149 IEM_MC_ADVANCE_RIP_AND_FINISH();
13150 } IEM_MC_ENDIF();
13151 IEM_MC_END();
13152 break;
13153
13154 case IEMMODE_64BIT:
13155 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13157 IEM_MC_IF_RCX_IS_NOT_ONE_AND_EFL_BIT_SET(X86_EFL_ZF) {
13158 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13159 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13160 } IEM_MC_ELSE() {
13161 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13162 IEM_MC_ADVANCE_RIP_AND_FINISH();
13163 } IEM_MC_ENDIF();
13164 IEM_MC_END();
13165 break;
13166
13167 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13168 }
13169}
13170
13171
13172/**
13173 * @opcode 0xe2
13174 */
13175FNIEMOP_DEF(iemOp_loop_Jb)
13176{
13177 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
13178 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13179 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13180
13181 /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
13182 * using the 32-bit operand size override. How can that be restarted? See
13183 * weird pseudo code in intel manual. */
13184
13185 /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
13186 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
13187 * the loop causes guest crashes, but when logging it's nice to skip a few million
13188 * lines of useless output. */
13189#if defined(LOG_ENABLED)
13190 if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
13191 switch (pVCpu->iem.s.enmEffAddrMode)
13192 {
13193 case IEMMODE_16BIT:
13194 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13196 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13197 IEM_MC_ADVANCE_RIP_AND_FINISH();
13198 IEM_MC_END();
13199 break;
13200
13201 case IEMMODE_32BIT:
13202 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13203 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13204 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13205 IEM_MC_ADVANCE_RIP_AND_FINISH();
13206 IEM_MC_END();
13207 break;
13208
13209 case IEMMODE_64BIT:
13210 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13212 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13213 IEM_MC_ADVANCE_RIP_AND_FINISH();
13214 IEM_MC_END();
13215 break;
13216
13217 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13218 }
13219#endif
13220
13221 switch (pVCpu->iem.s.enmEffAddrMode)
13222 {
13223 case IEMMODE_16BIT:
13224 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13226 IEM_MC_IF_CX_IS_NOT_ONE() {
13227 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
13228 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13229 } IEM_MC_ELSE() {
13230 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
13231 IEM_MC_ADVANCE_RIP_AND_FINISH();
13232 } IEM_MC_ENDIF();
13233 IEM_MC_END();
13234 break;
13235
13236 case IEMMODE_32BIT:
13237 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13239 IEM_MC_IF_ECX_IS_NOT_ONE() {
13240 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
13241 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13242 } IEM_MC_ELSE() {
13243 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
13244 IEM_MC_ADVANCE_RIP_AND_FINISH();
13245 } IEM_MC_ENDIF();
13246 IEM_MC_END();
13247 break;
13248
13249 case IEMMODE_64BIT:
13250 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13251 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13252 IEM_MC_IF_RCX_IS_NOT_ONE() {
13253 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
13254 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13255 } IEM_MC_ELSE() {
13256 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
13257 IEM_MC_ADVANCE_RIP_AND_FINISH();
13258 } IEM_MC_ENDIF();
13259 IEM_MC_END();
13260 break;
13261
13262 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13263 }
13264}
13265
13266
13267/**
13268 * @opcode 0xe3
13269 */
13270FNIEMOP_DEF(iemOp_jecxz_Jb)
13271{
13272 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
13273 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
13274 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
13275
13276 switch (pVCpu->iem.s.enmEffAddrMode)
13277 {
13278 case IEMMODE_16BIT:
13279 IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
13280 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13281 IEM_MC_IF_CX_IS_NZ() {
13282 IEM_MC_ADVANCE_RIP_AND_FINISH();
13283 } IEM_MC_ELSE() {
13284 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13285 } IEM_MC_ENDIF();
13286 IEM_MC_END();
13287 break;
13288
13289 case IEMMODE_32BIT:
13290 IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
13291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13292 IEM_MC_IF_ECX_IS_NZ() {
13293 IEM_MC_ADVANCE_RIP_AND_FINISH();
13294 } IEM_MC_ELSE() {
13295 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13296 } IEM_MC_ENDIF();
13297 IEM_MC_END();
13298 break;
13299
13300 case IEMMODE_64BIT:
13301 IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
13302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13303 IEM_MC_IF_RCX_IS_NZ() {
13304 IEM_MC_ADVANCE_RIP_AND_FINISH();
13305 } IEM_MC_ELSE() {
13306 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
13307 } IEM_MC_ENDIF();
13308 IEM_MC_END();
13309 break;
13310
13311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
13312 }
13313}
13314
13315
13316/**
13317 * @opcode 0xe4
13318 * @opfltest iopl
13319 */
13320FNIEMOP_DEF(iemOp_in_AL_Ib)
13321{
13322 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
13323 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13325 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13326 iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13327}
13328
13329
13330/**
13331 * @opcode 0xe5
13332 * @opfltest iopl
13333 */
13334FNIEMOP_DEF(iemOp_in_eAX_Ib)
13335{
13336 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
13337 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13339 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
13340 iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13341 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13342}
13343
13344
13345/**
13346 * @opcode 0xe6
13347 * @opfltest iopl
13348 */
13349FNIEMOP_DEF(iemOp_out_Ib_AL)
13350{
13351 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
13352 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13354 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13355 iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13356}
13357
13358
13359/**
13360 * @opcode 0xe7
13361 * @opfltest iopl
13362 */
13363FNIEMOP_DEF(iemOp_out_Ib_eAX)
13364{
13365 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
13366 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
13367 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
13368 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
13369 iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
13370 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
13371}
13372
13373
/**
 * @opcode 0xe8
 *
 * 'CALL rel16/rel32' - near relative call.  The relative displacement is
 * decoded per the effective operand size (in 64-bit mode the rel32 is
 * sign-extended to 64 bits) and the push+branch work is deferred to the
 * matching iemCImpl_call_rel_NN worker.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* The DEFER macro returns, so no break is needed after each case. */
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* rel32 sign-extended to 64 bits, per the 64-bit default operand size. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13407
13408
/**
 * @opcode 0xe9
 *
 * 'JMP rel16/rel32' - near relative jump.  16-bit operand size takes a
 * signed 16-bit displacement; 32-bit and 64-bit modes share the signed
 * 32-bit displacement path (sign-extension handled by the REL_JMP MC).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:  /* shares the rel32 decode with 32-bit mode */
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13438
13439
/**
 * @opcode 0xea
 *
 * 'JMP ptr16:16/ptr16:32' - direct far jump with an inline selector:offset
 * operand.  Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT); the actual mode/
 * privilege handling is deferred to iemCImpl_FarJmp.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel;  IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* UINT64_MAX: conservatively treat all guest registers as clobbered. */
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
13461
13462
/**
 * @opcode 0xeb
 *
 * 'JMP rel8' - short relative jump; signed 8-bit displacement, handled
 * entirely inline via the REL_JMP microcode statement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
13477
13478
/**
 * @opcode 0xec
 * @opfltest iopl
 *
 * 'IN AL,DX' - read one byte from the I/O port in DX into AL, deferring to
 * the iemCImpl_in_eAX_DX worker (clobber mask marks RAX dirty).
 */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13491
13492
/**
 * @opcode 0xed
 * @opfltest iopl
 *
 * 'IN eAX,DX' - read a word/dword (per effective operand size) from the I/O
 * port in DX into eAX, deferring to the iemCImpl_in_eAX_DX worker.
 */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13506
13507
/**
 * @opcode 0xee
 * @opfltest iopl
 *
 * 'OUT DX,AL' - write AL to the I/O port in DX, deferring to the
 * iemCImpl_out_DX_eAX worker (no guest registers are modified).
 */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
13519
13520
/**
 * @opcode 0xef
 * @opfltest iopl
 *
 * 'OUT DX,eAX' - write a word/dword (per effective operand size) from eAX to
 * the I/O port in DX, deferring to the iemCImpl_out_DX_eAX worker.
 */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
13533
13534
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK in the prefix state and then decodes
 * the following byte through the one-byte opcode map.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13546
13547
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises \#DB via the common iemCImpl_int software-interrupt
 * worker (IEMINT_INT1 flavor).  Ends the current translation block since it
 * branches far and may switch mode.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
13563
13564
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: replaces any pending REPE prefix with REPNE, updates
 * the 4-entry opcode-table selector, and decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13582
13583
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: replaces any pending REPNE prefix with REPE, updates
 * the 4-entry opcode-table selector, and decodes the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
13601
13602
/**
 * @opcode 0xf4
 *
 * HLT: deferred to iemCImpl_hlt; ends the translation block and may cause
 * a VM-exit.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
}
13612
13613
/**
 * @opcode 0xf5
 * @opflmodify cf
 *
 * CMC: complements the carry flag in EFLAGS; no other state is touched.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13627
13628
/**
 * Body of 'inc/dec/not/neg Eb'.
 *
 * Register operands are modified in place via a GREG reference.  Memory
 * operands are mapped read-write; with a LOCK prefix (unless the lock is
 * being disregarded) the atomic mapping and the a_fnLockedU8 worker are
 * used instead of the normal mapping/worker.
 *
 * @param   a_bRm           The ModR/M byte.
 * @param   a_fnNormalU8    Plain 8-bit worker (takes value ptr + EFLAGS ptr).
 * @param   a_fnLockedU8    Atomic 8-bit worker for locked memory operands.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_ATOMIC(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
13689
13690
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Covers the register target and the unlocked memory target for all three
 * operand sizes.  Deliberately ends inside an open 'else {' so that
 * IEMOP_BODY_UNARY_Ev_LOCKED can supply the locked memory variant and
 * close the braces; the two macros must always be used as a pair.
 *
 * @param   a_fnNormalU16   16-bit worker (value ptr + EFLAGS ptr).
 * @param   a_fnNormalU32   32-bit worker.
 * @param   a_fnNormalU64   64-bit worker.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
13814
/**
 * Locked (atomic) memory-target continuation of IEMOP_BODY_UNARY_Ev.
 *
 * Must immediately follow IEMOP_BODY_UNARY_Ev in the same function: it fills
 * in the body of the open 'else {' left by that macro (the LOCK-prefixed
 * memory path, using atomic mappings and the a_fnLockedUNN workers) and then
 * closes the remaining braces.
 *
 * @param   a_fnLockedU16   Atomic 16-bit worker (value ptr + EFLAGS ptr).
 * @param   a_fnLockedU32   Atomic 32-bit worker.
 * @param   a_fnLockedU64   Atomic 64-bit worker.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_ATOMIC(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_ATOMIC(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_ATOMIC(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_ATOMIC(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13880
13881
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @opflclass logical
 * @todo also /1
 *
 * 'TEST Eb,Ib' - ANDs the r/m byte with an immediate and sets EFLAGS; the
 * destination is never written (memory operand is mapped read-only).
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = trailing imm8 byte */

        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13933
13934
/* Body for opcode 0xf6 variations /4, /5, /6 and /7 (mul/imul/div/idiv Eb).
   The 8-bit operand comes from the ModR/M r/m (register or memory); AX is
   referenced as the implicit 16-bit accumulator.  The worker returns non-zero
   on a divide error, in which case \#DE is raised instead of advancing RIP. */
#define IEMOP_GRP3_MUL_DIV_EB(bRm, a_pfnU8Expr) \
    PFNIEMAIMPLMULDIVU8 const pfnU8 = (a_pfnU8Expr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint16_t *, pu16AX, 0); \
        IEM_MC_ARG(uint8_t, u8Value, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(int32_t, rc); \
        \
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        IEM_MC_BEGIN(3, 2, 0, 0); \
        IEM_MC_ARG(uint16_t *, pu16AX, 0); \
        IEM_MC_ARG(uint8_t, u8Value, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
        IEM_MC_LOCAL(int32_t, rc); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags); \
        IEM_MC_IF_LOCAL_IS_Z(rc) { \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        } IEM_MC_ELSE() { \
            IEM_MC_RAISE_DIVIDE_ERROR(); \
        } IEM_MC_ENDIF(); \
        \
        IEM_MC_END(); \
    } (void)0
13984
13985
/* Body for opcode 0xf7 variant /4, /5, /6 and /7 (mul/imul/div/idiv Ev).
   Uses the AX/DX (or eAX/eDX, rAX/rDX) register pair as implicit operands,
   selecting the worker from the size table a_pImplExpr by effective operand
   size.  A non-zero worker return raises \#DE instead of advancing RIP; the
   32-bit success path additionally clears the high halves of RAX/RDX. */
#define IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, a_pImplExpr) \
    PCIEMOPMULDIVSIZES const pImpl = (a_pImplExpr); \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register access */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 1, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16AX, 0); \
                IEM_MC_ARG(uint16_t *, pu16DX, 1); \
                IEM_MC_ARG(uint16_t, u16Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32AX, 0); \
                IEM_MC_ARG(uint32_t *, pu32DX, 1); \
                IEM_MC_ARG(uint32_t, u32Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64AX, 0); \
                IEM_MC_ARG(uint64_t *, pu64DX, 1); \
                IEM_MC_ARG(uint64_t, u64Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory access. */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(4, 2, 0, 0); \
                IEM_MC_ARG(uint16_t *, pu16AX, 0); \
                IEM_MC_ARG(uint16_t *, pu16DX, 1); \
                IEM_MC_ARG(uint16_t, u16Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0); \
                IEM_MC_ARG(uint32_t *, pu32AX, 0); \
                IEM_MC_ARG(uint32_t *, pu32DX, 1); \
                IEM_MC_ARG(uint32_t, u32Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0); \
                IEM_MC_ARG(uint64_t *, pu64AX, 0); \
                IEM_MC_ARG(uint64_t *, pu64DX, 1); \
                IEM_MC_ARG(uint64_t, u64Value, 2); \
                IEM_MC_ARG(uint32_t *, pEFlags, 3); \
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                IEM_MC_LOCAL(int32_t, rc); \
                \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX); \
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags); \
                IEM_MC_IF_LOCAL_IS_Z(rc) { \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                } IEM_MC_ELSE() { \
                    IEM_MC_RAISE_DIVIDE_ERROR(); \
                } IEM_MC_ENDIF(); \
                \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } (void)0
14153
14154
/**
 * @opmaps grp3_f6
 * @opcode /2
 * @opflclass unchanged
 *
 * 'NOT Eb' - one's complement of the r/m byte; EFLAGS are left unchanged.
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
/** @todo does not modify EFLAGS. */
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
14166
14167
14168/**
14169 * @opmaps grp3_f6
14170 * @opcode /3
14171 * @opflclass arithmetic
14172 */
14173FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
14174{
14175 IEMOP_MNEMONIC(net_Eb, "neg Eb");
14176 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
14177}
14178
14179
/**
 * @opcode 0xf6
 *
 * Group 3, byte operands: dispatches on the ModR/M reg field to
 * test (/0 and /1 - see the 'todo also /1' note on iemOp_grp3_test_Eb),
 * not, neg, mul, imul, div and idiv.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 2:
            return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb, bRm);
        case 4:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /4
             * @opflclass multiply
             */
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
            break;
        }
        case 5:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /5
             * @opflclass multiply
             */
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
            break;
        }
        case 6:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /6
             * @opflclass division
             */
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
            break;
        }
        case 7:
        {
            /**
             * @opdone
             * @opmaps grp3_f6
             * @opcode /7
             * @opflclass division
             */
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            IEMOP_GRP3_MUL_DIV_EB(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
            break;
        }
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14251
14252
/**
 * @opmaps grp3_f7
 * @opcode /0
 * @opflclass logical
 *
 * 'TEST Ev,Iv' - ANDs the r/m operand with an immediate and sets EFLAGS;
 * the destination is never written (memory operands are mapped read-only,
 * and the 32-bit register path deliberately skips the high-dword clear).
 * In 64-bit mode the immediate is a sign-extended imm32.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = trailing imm16 */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = trailing imm32 */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = trailing imm32 (sign-extended) */

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14392
14393
14394/**
14395 * @opmaps grp3_f7
14396 * @opcode /2
14397 * @opflclass unchanged
14398 */
14399FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
14400{
14401/** @todo does not modify EFLAGS */
14402 IEMOP_MNEMONIC(not_Ev, "not Ev");
14403 IEMOP_BODY_UNARY_Ev( iemAImpl_not_u16, iemAImpl_not_u32, iemAImpl_not_u64);
14404 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
14405}
14406
14407
14408/**
14409 * @opmaps grp3_f7
14410 * @opcode /3
14411 * @opflclass arithmetic
14412 */
14413FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
14414{
14415 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
14416 IEMOP_BODY_UNARY_Ev( iemAImpl_neg_u16, iemAImpl_neg_u32, iemAImpl_neg_u64);
14417 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
14418}
14419
14420
14421/**
14422 * @opmaps grp3_f7
14423 * @opcode /4
14424 * @opflclass multiply
14425 */
14426FNIEMOP_DEF_1(iemOp_grp3_mul_Ev, uint8_t, bRm)
14427{
14428 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
14429 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14430 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
14431}
14432
14433
14434/**
14435 * @opmaps grp3_f7
14436 * @opcode /5
14437 * @opflclass multiply
14438 */
14439FNIEMOP_DEF_1(iemOp_grp3_imul_Ev, uint8_t, bRm)
14440{
14441 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
14442 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
14443 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
14444}
14445
14446
14447/**
14448 * @opmaps grp3_f7
14449 * @opcode /6
14450 * @opflclass division
14451 */
14452FNIEMOP_DEF_1(iemOp_grp3_div_Ev, uint8_t, bRm)
14453{
14454 IEMOP_MNEMONIC(div_Ev, "div Ev");
14455 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14456 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
14457}
14458
14459
14460/**
14461 * @opmaps grp3_f7
14462 * @opcode /7
14463 * @opflclass division
14464 */
14465FNIEMOP_DEF_1(iemOp_grp3_idiv_Ev, uint8_t, bRm)
14466{
14467 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
14468 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
14469 IEMOP_BODY_GRP3_MUL_DIV_EV(bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
14470}
14471
14472
14473/**
14474 * @opcode 0xf7
14475 */
14476FNIEMOP_DEF(iemOp_Grp3_Ev)
14477{
14478 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14479 switch (IEM_GET_MODRM_REG_8(bRm))
14480 {
14481 case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14482 case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
14483 case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
14484 case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
14485 case 4: return FNIEMOP_CALL_1(iemOp_grp3_mul_Ev, bRm);
14486 case 5: return FNIEMOP_CALL_1(iemOp_grp3_imul_Ev, bRm);
14487 case 6: return FNIEMOP_CALL_1(iemOp_grp3_div_Ev, bRm);
14488 case 7: return FNIEMOP_CALL_1(iemOp_grp3_idiv_Ev, bRm);
14489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14490 }
14491}
14492
14493
14494/**
14495 * @opcode 0xf8
14496 * @opflmodify cf
14497 * @opflclear cf
14498 */
14499FNIEMOP_DEF(iemOp_clc)
14500{
14501 IEMOP_MNEMONIC(clc, "clc");
14502 IEM_MC_BEGIN(0, 0, 0, 0);
14503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14504 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
14505 IEM_MC_ADVANCE_RIP_AND_FINISH();
14506 IEM_MC_END();
14507}
14508
14509
14510/**
14511 * @opcode 0xf9
14512 * @opflmodify cf
14513 * @opflset cf
14514 */
14515FNIEMOP_DEF(iemOp_stc)
14516{
14517 IEMOP_MNEMONIC(stc, "stc");
14518 IEM_MC_BEGIN(0, 0, 0, 0);
14519 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14520 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
14521 IEM_MC_ADVANCE_RIP_AND_FINISH();
14522 IEM_MC_END();
14523}
14524
14525
14526/**
14527 * @opcode 0xfa
14528 * @opfltest iopl,vm
14529 * @opflmodify if,vif
14530 */
14531FNIEMOP_DEF(iemOp_cli)
14532{
14533 IEMOP_MNEMONIC(cli, "cli");
14534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14535 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
14536}
14537
14538
14539/**
14540 * @opcode 0xfb
14541 * @opfltest iopl,vm
14542 * @opflmodify if,vif
14543 */
14544FNIEMOP_DEF(iemOp_sti)
14545{
14546 IEMOP_MNEMONIC(sti, "sti");
14547 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14548 IEM_MC_DEFER_TO_CIMPL_0_RET( IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
14549 | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
14550}
14551
14552
14553/**
14554 * @opcode 0xfc
14555 * @opflmodify df
14556 * @opflclear df
14557 */
14558FNIEMOP_DEF(iemOp_cld)
14559{
14560 IEMOP_MNEMONIC(cld, "cld");
14561 IEM_MC_BEGIN(0, 0, 0, 0);
14562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14563 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
14564 IEM_MC_ADVANCE_RIP_AND_FINISH();
14565 IEM_MC_END();
14566}
14567
14568
14569/**
14570 * @opcode 0xfd
14571 * @opflmodify df
14572 * @opflset df
14573 */
14574FNIEMOP_DEF(iemOp_std)
14575{
14576 IEMOP_MNEMONIC(std, "std");
14577 IEM_MC_BEGIN(0, 0, 0, 0);
14578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14579 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
14580 IEM_MC_ADVANCE_RIP_AND_FINISH();
14581 IEM_MC_END();
14582}
14583
14584
14585/**
14586 * @opmaps grp4
14587 * @opcode /0
14588 * @opflclass incdec
14589 */
14590FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
14591{
14592 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
14593 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
14594}
14595
14596
14597/**
14598 * @opmaps grp4
14599 * @opcode /1
14600 * @opflclass incdec
14601 */
14602FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
14603{
14604 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
14605 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
14606}
14607
14608
14609/**
14610 * @opcode 0xfe
14611 */
14612FNIEMOP_DEF(iemOp_Grp4)
14613{
14614 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14615 switch (IEM_GET_MODRM_REG_8(bRm))
14616 {
14617 case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
14618 case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
14619 default:
14620 /** @todo is the eff-addr decoded? */
14621 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
14622 IEMOP_RAISE_INVALID_OPCODE_RET();
14623 }
14624}
14625
14626/**
14627 * @opmaps grp5
14628 * @opcode /0
14629 * @opflclass incdec
14630 */
14631FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
14632{
14633 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
14634 IEMOP_BODY_UNARY_Ev( iemAImpl_inc_u16, iemAImpl_inc_u32, iemAImpl_inc_u64);
14635 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
14636}
14637
14638
14639/**
14640 * @opmaps grp5
14641 * @opcode /1
14642 * @opflclass incdec
14643 */
14644FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
14645{
14646 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
14647 IEMOP_BODY_UNARY_Ev( iemAImpl_dec_u16, iemAImpl_dec_u32, iemAImpl_dec_u64);
14648 IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
14649}
14650
14651
14652/**
14653 * Opcode 0xff /2.
14654 * @param bRm The RM byte.
14655 */
14656FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
14657{
14658 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
14659 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14660
14661 if (IEM_IS_MODRM_REG_MODE(bRm))
14662 {
14663 /* The new RIP is taken from a register. */
14664 switch (pVCpu->iem.s.enmEffOpSize)
14665 {
14666 case IEMMODE_16BIT:
14667 IEM_MC_BEGIN(1, 0, 0, 0);
14668 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14669 IEM_MC_ARG(uint16_t, u16Target, 0);
14670 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14671 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
14672 IEM_MC_END();
14673 break;
14674
14675 case IEMMODE_32BIT:
14676 IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
14677 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14678 IEM_MC_ARG(uint32_t, u32Target, 0);
14679 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14680 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
14681 IEM_MC_END();
14682 break;
14683
14684 case IEMMODE_64BIT:
14685 IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
14686 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14687 IEM_MC_ARG(uint64_t, u64Target, 0);
14688 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14689 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
14690 IEM_MC_END();
14691 break;
14692
14693 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14694 }
14695 }
14696 else
14697 {
14698 /* The new RIP is taken from a register. */
14699 switch (pVCpu->iem.s.enmEffOpSize)
14700 {
14701 case IEMMODE_16BIT:
14702 IEM_MC_BEGIN(1, 1, 0, 0);
14703 IEM_MC_ARG(uint16_t, u16Target, 0);
14704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14705 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14706 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14707 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14708 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
14709 IEM_MC_END();
14710 break;
14711
14712 case IEMMODE_32BIT:
14713 IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
14714 IEM_MC_ARG(uint32_t, u32Target, 0);
14715 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14716 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14718 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14719 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
14720 IEM_MC_END();
14721 break;
14722
14723 case IEMMODE_64BIT:
14724 IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
14725 IEM_MC_ARG(uint64_t, u64Target, 0);
14726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14729 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14730 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
14731 IEM_MC_END();
14732 break;
14733
14734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14735 }
14736 }
14737}
14738
/**
 * Common body for grp5 far branches (0xff /3 CALLF Ep and 0xff /5 JMPF Ep).
 *
 * Loads a far pointer (16-bit selector + 16/32/64-bit offset) from memory
 * and defers to @a a_fnCImpl.  A register operand is invalid (\#UD).  In
 * 64-bit mode the default operand size is 32-bit; only Intel CPUs honor a
 * REX.W prefix for the 64-bit form.
 *
 * @param a_bRm          The ModR/M byte.
 * @param a_fnCImpl      C implementation taking (u16Sel, offSeg, enmEffOpSize).
 * @param a_fCImplExtra  Additional IEM_CIMPL_F_XXX flags (e.g. branch-stack
 *                       for the call variant, zero for the jump variant).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                    | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14807
14808
14809/**
14810 * Opcode 0xff /3.
14811 * @param bRm The RM byte.
14812 */
14813FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
14814{
14815 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
14816 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
14817}
14818
14819
14820/**
14821 * Opcode 0xff /4.
14822 * @param bRm The RM byte.
14823 */
14824FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
14825{
14826 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
14827 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
14828
14829 if (IEM_IS_MODRM_REG_MODE(bRm))
14830 {
14831 /* The new RIP is taken from a register. */
14832 switch (pVCpu->iem.s.enmEffOpSize)
14833 {
14834 case IEMMODE_16BIT:
14835 IEM_MC_BEGIN(0, 1, 0, 0);
14836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14837 IEM_MC_LOCAL(uint16_t, u16Target);
14838 IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14839 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14840 IEM_MC_END();
14841 break;
14842
14843 case IEMMODE_32BIT:
14844 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
14845 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14846 IEM_MC_LOCAL(uint32_t, u32Target);
14847 IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14848 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14849 IEM_MC_END();
14850 break;
14851
14852 case IEMMODE_64BIT:
14853 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
14854 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14855 IEM_MC_LOCAL(uint64_t, u64Target);
14856 IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
14857 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14858 IEM_MC_END();
14859 break;
14860
14861 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14862 }
14863 }
14864 else
14865 {
14866 /* The new RIP is taken from a memory location. */
14867 switch (pVCpu->iem.s.enmEffOpSize)
14868 {
14869 case IEMMODE_16BIT:
14870 IEM_MC_BEGIN(0, 2, 0, 0);
14871 IEM_MC_LOCAL(uint16_t, u16Target);
14872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14873 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14874 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14875 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14876 IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
14877 IEM_MC_END();
14878 break;
14879
14880 case IEMMODE_32BIT:
14881 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
14882 IEM_MC_LOCAL(uint32_t, u32Target);
14883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14886 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14887 IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
14888 IEM_MC_END();
14889 break;
14890
14891 case IEMMODE_64BIT:
14892 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14893 IEM_MC_LOCAL(uint64_t, u64Target);
14894 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14895 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14896 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14897 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14898 IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
14899 IEM_MC_END();
14900 break;
14901
14902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14903 }
14904 }
14905}
14906
14907
14908/**
14909 * Opcode 0xff /5.
14910 * @param bRm The RM byte.
14911 */
14912FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
14913{
14914 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
14915 IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
14916}
14917
14918
14919/**
14920 * Opcode 0xff /6.
14921 * @param bRm The RM byte.
14922 */
14923FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
14924{
14925 IEMOP_MNEMONIC(push_Ev, "push Ev");
14926
14927 /* Registers are handled by a common worker. */
14928 if (IEM_IS_MODRM_REG_MODE(bRm))
14929 return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));
14930
14931 /* Memory we do here. */
14932 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
14933 switch (pVCpu->iem.s.enmEffOpSize)
14934 {
14935 case IEMMODE_16BIT:
14936 IEM_MC_BEGIN(0, 2, 0, 0);
14937 IEM_MC_LOCAL(uint16_t, u16Src);
14938 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14939 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14941 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14942 IEM_MC_PUSH_U16(u16Src);
14943 IEM_MC_ADVANCE_RIP_AND_FINISH();
14944 IEM_MC_END();
14945 break;
14946
14947 case IEMMODE_32BIT:
14948 IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
14949 IEM_MC_LOCAL(uint32_t, u32Src);
14950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14953 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14954 IEM_MC_PUSH_U32(u32Src);
14955 IEM_MC_ADVANCE_RIP_AND_FINISH();
14956 IEM_MC_END();
14957 break;
14958
14959 case IEMMODE_64BIT:
14960 IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
14961 IEM_MC_LOCAL(uint64_t, u64Src);
14962 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
14963 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
14964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
14965 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
14966 IEM_MC_PUSH_U64(u64Src);
14967 IEM_MC_ADVANCE_RIP_AND_FINISH();
14968 IEM_MC_END();
14969 break;
14970
14971 IEM_NOT_REACHED_DEFAULT_CASE_RET();
14972 }
14973}
14974
14975
14976/**
14977 * @opcode 0xff
14978 */
14979FNIEMOP_DEF(iemOp_Grp5)
14980{
14981 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14982 switch (IEM_GET_MODRM_REG_8(bRm))
14983 {
14984 case 0:
14985 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14986 case 1:
14987 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14988 case 2:
14989 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14990 case 3:
14991 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14992 case 4:
14993 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14994 case 5:
14995 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14996 case 6:
14997 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14998 case 7:
14999 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
15000 IEMOP_RAISE_INVALID_OPCODE_RET();
15001 }
15002 AssertFailedReturn(VERR_IEM_IPE_3);
15003}
15004
15005
15006
/**
 * The one-byte opcode decoder table, indexed by the opcode byte (0x00..0xff).
 * Four entries per row; the row comment gives the first opcode of the row.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */  iemOp_add_Eb_Gb,        iemOp_add_Ev_Gv,        iemOp_add_Gb_Eb,        iemOp_add_Gv_Ev,
    /* 0x04 */  iemOp_add_Al_Ib,        iemOp_add_eAX_Iz,       iemOp_push_ES,          iemOp_pop_ES,
    /* 0x08 */  iemOp_or_Eb_Gb,         iemOp_or_Ev_Gv,         iemOp_or_Gb_Eb,         iemOp_or_Gv_Ev,
    /* 0x0c */  iemOp_or_Al_Ib,         iemOp_or_eAX_Iz,        iemOp_push_CS,          iemOp_2byteEscape,
    /* 0x10 */  iemOp_adc_Eb_Gb,        iemOp_adc_Ev_Gv,        iemOp_adc_Gb_Eb,        iemOp_adc_Gv_Ev,
    /* 0x14 */  iemOp_adc_Al_Ib,        iemOp_adc_eAX_Iz,       iemOp_push_SS,          iemOp_pop_SS,
    /* 0x18 */  iemOp_sbb_Eb_Gb,        iemOp_sbb_Ev_Gv,        iemOp_sbb_Gb_Eb,        iemOp_sbb_Gv_Ev,
    /* 0x1c */  iemOp_sbb_Al_Ib,        iemOp_sbb_eAX_Iz,       iemOp_push_DS,          iemOp_pop_DS,
    /* 0x20 */  iemOp_and_Eb_Gb,        iemOp_and_Ev_Gv,        iemOp_and_Gb_Eb,        iemOp_and_Gv_Ev,
    /* 0x24 */  iemOp_and_Al_Ib,        iemOp_and_eAX_Iz,       iemOp_seg_ES,           iemOp_daa,
    /* 0x28 */  iemOp_sub_Eb_Gb,        iemOp_sub_Ev_Gv,        iemOp_sub_Gb_Eb,        iemOp_sub_Gv_Ev,
    /* 0x2c */  iemOp_sub_Al_Ib,        iemOp_sub_eAX_Iz,       iemOp_seg_CS,           iemOp_das,
    /* 0x30 */  iemOp_xor_Eb_Gb,        iemOp_xor_Ev_Gv,        iemOp_xor_Gb_Eb,        iemOp_xor_Gv_Ev,
    /* 0x34 */  iemOp_xor_Al_Ib,        iemOp_xor_eAX_Iz,       iemOp_seg_SS,           iemOp_aaa,
    /* 0x38 */  iemOp_cmp_Eb_Gb,        iemOp_cmp_Ev_Gv,        iemOp_cmp_Gb_Eb,        iemOp_cmp_Gv_Ev,
    /* 0x3c */  iemOp_cmp_Al_Ib,        iemOp_cmp_eAX_Iz,       iemOp_seg_DS,           iemOp_aas,
    /* 0x40 */  iemOp_inc_eAX,          iemOp_inc_eCX,          iemOp_inc_eDX,          iemOp_inc_eBX,
    /* 0x44 */  iemOp_inc_eSP,          iemOp_inc_eBP,          iemOp_inc_eSI,          iemOp_inc_eDI,
    /* 0x48 */  iemOp_dec_eAX,          iemOp_dec_eCX,          iemOp_dec_eDX,          iemOp_dec_eBX,
    /* 0x4c */  iemOp_dec_eSP,          iemOp_dec_eBP,          iemOp_dec_eSI,          iemOp_dec_eDI,
    /* 0x50 */  iemOp_push_eAX,         iemOp_push_eCX,         iemOp_push_eDX,         iemOp_push_eBX,
    /* 0x54 */  iemOp_push_eSP,         iemOp_push_eBP,         iemOp_push_eSI,         iemOp_push_eDI,
    /* 0x58 */  iemOp_pop_eAX,          iemOp_pop_eCX,          iemOp_pop_eDX,          iemOp_pop_eBX,
    /* 0x5c */  iemOp_pop_eSP,          iemOp_pop_eBP,          iemOp_pop_eSI,          iemOp_pop_eDI,
    /* 0x60 */  iemOp_pusha,            iemOp_popa__mvex,       iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */  iemOp_seg_FS,           iemOp_seg_GS,           iemOp_op_size,          iemOp_addr_size,
    /* 0x68 */  iemOp_push_Iz,          iemOp_imul_Gv_Ev_Iz,    iemOp_push_Ib,          iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */  iemOp_insb_Yb_DX,       iemOp_inswd_Yv_DX,      iemOp_outsb_Yb_DX,      iemOp_outswd_Yv_DX,
    /* 0x70 */  iemOp_jo_Jb,            iemOp_jno_Jb,           iemOp_jc_Jb,            iemOp_jnc_Jb,
    /* 0x74 */  iemOp_je_Jb,            iemOp_jne_Jb,           iemOp_jbe_Jb,           iemOp_jnbe_Jb,
    /* 0x78 */  iemOp_js_Jb,            iemOp_jns_Jb,           iemOp_jp_Jb,            iemOp_jnp_Jb,
    /* 0x7c */  iemOp_jl_Jb,            iemOp_jnl_Jb,           iemOp_jle_Jb,           iemOp_jnle_Jb,
    /* 0x80 */  iemOp_Grp1_Eb_Ib_80,    iemOp_Grp1_Ev_Iz,       iemOp_Grp1_Eb_Ib_82,    iemOp_Grp1_Ev_Ib,
    /* 0x84 */  iemOp_test_Eb_Gb,       iemOp_test_Ev_Gv,       iemOp_xchg_Eb_Gb,       iemOp_xchg_Ev_Gv,
    /* 0x88 */  iemOp_mov_Eb_Gb,        iemOp_mov_Ev_Gv,        iemOp_mov_Gb_Eb,        iemOp_mov_Gv_Ev,
    /* 0x8c */  iemOp_mov_Ev_Sw,        iemOp_lea_Gv_M,         iemOp_mov_Sw_Ev,        iemOp_Grp1A__xop,
    /* 0x90 */  iemOp_nop,              iemOp_xchg_eCX_eAX,     iemOp_xchg_eDX_eAX,     iemOp_xchg_eBX_eAX,
    /* 0x94 */  iemOp_xchg_eSP_eAX,     iemOp_xchg_eBP_eAX,     iemOp_xchg_eSI_eAX,     iemOp_xchg_eDI_eAX,
    /* 0x98 */  iemOp_cbw,              iemOp_cwd,              iemOp_call_Ap,          iemOp_wait,
    /* 0x9c */  iemOp_pushf_Fv,         iemOp_popf_Fv,          iemOp_sahf,             iemOp_lahf,
    /* 0xa0 */  iemOp_mov_AL_Ob,        iemOp_mov_rAX_Ov,       iemOp_mov_Ob_AL,        iemOp_mov_Ov_rAX,
    /* 0xa4 */  iemOp_movsb_Xb_Yb,      iemOp_movswd_Xv_Yv,     iemOp_cmpsb_Xb_Yb,      iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */  iemOp_test_AL_Ib,       iemOp_test_eAX_Iz,      iemOp_stosb_Yb_AL,      iemOp_stoswd_Yv_eAX,
    /* 0xac */  iemOp_lodsb_AL_Xb,      iemOp_lodswd_eAX_Xv,    iemOp_scasb_AL_Xb,      iemOp_scaswd_eAX_Xv,
    /* 0xb0 */  iemOp_mov_AL_Ib,        iemOp_CL_Ib,            iemOp_DL_Ib,            iemOp_BL_Ib,
    /* 0xb4 */  iemOp_mov_AH_Ib,        iemOp_CH_Ib,            iemOp_DH_Ib,            iemOp_BH_Ib,
    /* 0xb8 */  iemOp_eAX_Iv,           iemOp_eCX_Iv,           iemOp_eDX_Iv,           iemOp_eBX_Iv,
    /* 0xbc */  iemOp_eSP_Iv,           iemOp_eBP_Iv,           iemOp_eSI_Iv,           iemOp_eDI_Iv,
    /* 0xc0 */  iemOp_Grp2_Eb_Ib,       iemOp_Grp2_Ev_Ib,       iemOp_retn_Iw,          iemOp_retn,
    /* 0xc4 */  iemOp_les_Gv_Mp__vex3,  iemOp_lds_Gv_Mp__vex2,  iemOp_Grp11_Eb_Ib,      iemOp_Grp11_Ev_Iz,
    /* 0xc8 */  iemOp_enter_Iw_Ib,      iemOp_leave,            iemOp_retf_Iw,          iemOp_retf,
    /* 0xcc */  iemOp_int3,             iemOp_int_Ib,           iemOp_into,             iemOp_iret,
    /* 0xd0 */  iemOp_Grp2_Eb_1,        iemOp_Grp2_Ev_1,        iemOp_Grp2_Eb_CL,       iemOp_Grp2_Ev_CL,
    /* 0xd4 */  iemOp_aam_Ib,           iemOp_aad_Ib,           iemOp_salc,             iemOp_xlat,
    /* 0xd8 */  iemOp_EscF0,            iemOp_EscF1,            iemOp_EscF2,            iemOp_EscF3,
    /* 0xdc */  iemOp_EscF4,            iemOp_EscF5,            iemOp_EscF6,            iemOp_EscF7,
    /* 0xe0 */  iemOp_loopne_Jb,        iemOp_loope_Jb,         iemOp_loop_Jb,          iemOp_jecxz_Jb,
    /* 0xe4 */  iemOp_in_AL_Ib,         iemOp_in_eAX_Ib,        iemOp_out_Ib_AL,        iemOp_out_Ib_eAX,
    /* 0xe8 */  iemOp_call_Jv,          iemOp_jmp_Jv,           iemOp_jmp_Ap,           iemOp_jmp_Jb,
    /* 0xec */  iemOp_in_AL_DX,         iemOp_in_eAX_DX,        iemOp_out_DX_AL,        iemOp_out_DX_eAX,
    /* 0xf0 */  iemOp_lock,             iemOp_int1,             iemOp_repne,            iemOp_repe,
    /* 0xf4 */  iemOp_hlt,              iemOp_cmc,              iemOp_Grp3_Eb,          iemOp_Grp3_Ev,
    /* 0xf8 */  iemOp_clc,              iemOp_stc,              iemOp_cli,              iemOp_sti,
    /* 0xfc */  iemOp_cld,              iemOp_std,              iemOp_Grp4,             iemOp_Grp5,
};
15074
15075
15076/** @} */
15077
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette