VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h@ 102443

Last change on this file since 102443 was 102437, checked in by vboxsync, 16 months ago

VMM/IEM: LOCK prefix and IEM_F_X86_DISREGARD_LOCK cleanups. Don't want the latter to influence other uses of the LOCK prefix than the atomic instructions. Don't test for the LOCK prefix with reg-variants of cmpxchg; raise #UD instead as per spec (copy-paste fun). bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 534.1 KB
Line 
1/* $Id: IEMAllInstOneByte.cpp.h 102437 2023-12-03 11:27:41Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*******************************************************************************
30* Global Variables *
31*******************************************************************************/
32extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
33
/* Instruction group definitions: */

/** @defgroup og_gen            General
 * @{ */
 /** @defgroup og_gen_arith     Arithmetic
  * @{ */
  /** @defgroup og_gen_arith_bin    Binary numbers */
  /** @defgroup og_gen_arith_dec    Decimal numbers */
 /** @} */
/** @} */

/** @defgroup og_stack          Stack
 * @{ */
 /** @defgroup og_stack_sreg    Segment registers */
/** @} */

/** @defgroup og_prefix         Prefixes */
/** @defgroup og_escapes        Escape bytes */



/** @name One byte opcodes.
 * @{
 */
58
/**
 * Body for instructions like ADD, AND, OR, TEST, CMP, ++ with a byte
 * memory/register as the destination.
 *
 * @param   a_fnNormalU8    The 8-bit worker for the non-locked variant.
 *
 * @note    Leaves two scopes open; must be completed with either
 *          IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 */
#define IEMOP_BODY_BINARY_rm_r8_RW(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
118
/**
 * Body for instructions like TEST & CMP, ++ with a byte memory/registers as
 * operands.
 *
 * @param   a_fnNormalU8    The 8-bit worker for the non-locked variant.
 *
 * @note    The destination is only read, so memory is mapped read-only.
 *          Leaves two scopes open; must be completed with either
 *          IEMOP_BODY_BINARY_rm_r8_NO_LOCK or IEMOP_BODY_BINARY_rm_r8_LOCKED.
 */
#define IEMOP_BODY_BINARY_rm_r8_RO(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,      0); \
            IEM_MC_ARG(uint8_t,         u8Src,       1); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
178
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW / _RO when the instruction does not
 * allow a LOCK prefix: raises \#UD and closes the two scopes those macros
 * left open.
 */
#define IEMOP_BODY_BINARY_rm_r8_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
185
/**
 * Tail for IEMOP_BODY_BINARY_rm_r8_RW / _RO handling the LOCK-prefixed
 * memory variant; closes the two scopes those macros left open.
 *
 * @param   a_fnLockedU8    The 8-bit worker for the locked variant.
 */
#define IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,  pu8Dst,           0); \
            IEM_MC_ARG(uint8_t,    u8Src,            1); \
            IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
208
/**
 * Body for byte instructions like ADD, AND, OR, ++ with a register as the
 * destination.
 *
 * @param   a_fnNormalU8    The 8-bit worker (register destination, so no
 *                          locked variant exists; LOCK raises \#UD).
 */
#define IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        \
        IEM_MC_FETCH_GREG_U8(u8Src, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         */ \
        IEM_MC_BEGIN(3, 1, 0, 0); \
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0); \
        IEM_MC_ARG(uint8_t,    u8Src,   1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
        \
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    (void)0
257
258
/**
 * Body for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * @param   a_fnNormalU16   Worker for the non-locked 16-bit variant.
 * @param   a_fnNormalU32   Worker for the non-locked 32-bit variant.
 * @param   a_fnNormalU64   Worker for the non-locked 64-bit variant.
 *
 * @note    Leaves two scopes open; must be completed with
 *          IEMOP_BODY_BINARY_rm_rv_LOCKED.
 * @note    Each IEM_MC_END() terminates the microcode block for its case, so
 *          the switch cases do not fall through despite ending the function.
 */
#define IEMOP_BODY_BINARY_rm_rv_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Tail for IEMOP_BODY_BINARY_rm_rv_RW handling the LOCK-prefixed memory
 * variant; closes the two scopes that macro left open.
 *
 * @param   a_fnLockedU16   Worker for the locked 16-bit variant.
 * @param   a_fnLockedU32   Worker for the locked 32-bit variant.
 * @param   a_fnLockedU64   Worker for the locked 64-bit variant.
 */
#define IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst,          0); \
                    IEM_MC_ARG(uint16_t,   u16Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst,          0); \
                    IEM_MC_ARG(uint32_t,   u32Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst,          0); \
                    IEM_MC_ARG(uint64_t,   u64Src,           1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,  bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
477
/**
 * Body for read-only word/dword/qword instructions like TEST and CMP with
 * memory/register as the destination.
 *
 * @param   a_fnNormalU16   Worker for the 16-bit variant.
 * @param   a_fnNormalU32   Worker for the 32-bit variant.
 * @param   a_fnNormalU64   Worker for the 64-bit variant.
 *
 * @note    Self-contained: a LOCK prefix raises \#UD (no locked variant for
 *          read-only destinations), so no tail macro is needed.
 */
#define IEMOP_BODY_BINARY_rm_rv_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
    \
    /* \
     * If rm is denoting a register, no more instruction bytes. \
     */ \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint16_t,   u16Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t,   u32Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint64_t,   u64Src,  1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * We're accessing memory. \
         * Note! We're putting the eflags on the stack here so we can commit them \
         *       after the memory. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst,    0); \
                    IEM_MC_ARG(uint16_t,         u16Src,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst,    0); \
                    IEM_MC_ARG(uint32_t,         u32Src,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U32(u32Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst,    0); \
                    IEM_MC_ARG(uint64_t,         u64Src,     1); \
                    IEM_MC_ARG_LOCAL_EFLAGS(     pEFlags, EFlags, 2); \
                    IEM_MC_LOCAL(RTGCPTR,        GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t,        bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_GREG_U64(u64Src, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
627
628
/**
 * Body for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * @param   a_fnNormalU8    The 8-bit worker.
 *
 * @note    No trailing semicolon/(void)0 - the invocation supplies it.
 */
#define IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8) \
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
    \
    IEM_MC_BEGIN(3, 0, 0, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
    IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/ u8Imm,  1); \
    IEM_MC_ARG(uint32_t *,      pEFlags,            2); \
    \
    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
    \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END()
648
/**
 * Body for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * @param   a_fnNormalU16       Worker for the 16-bit variant.
 * @param   a_fnNormalU32       Worker for the 32-bit variant.
 * @param   a_fnNormalU64       Worker for the 64-bit variant.
 * @param   a_fModifiesDstReg   Non-zero when the instruction writes rAX, so
 *                              the 32-bit variant must zero the high half.
 *
 * @note    IEM_MC_END() terminates each case's microcode block, so no break
 *          is needed after it.
 */
#define IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg) \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
        { \
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
            \
            IEM_MC_BEGIN(3, 0, 0, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_32BIT: \
        { \
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
            \
            if (a_fModifiesDstReg) \
                IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        case IEMMODE_64BIT: \
        { \
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
            \
            IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1); \
            IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
            \
            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX); \
            IEM_MC_REF_EFLAGS(pEFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } \
    (void)0
715
716
717
/* Instruction specification format - work in progress: */
719
720/**
721 * @opcode 0x00
722 * @opmnemonic add
723 * @op1 rm:Eb
724 * @op2 reg:Gb
725 * @opmaps one
726 * @openc ModR/M
727 * @opflmodify cf,pf,af,zf,sf,of
728 * @ophints harmless ignores_op_sizes
729 * @opstats add_Eb_Gb
730 * @opgroup og_gen_arith_bin
731 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
732 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
733 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
734 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
735 */
736FNIEMOP_DEF(iemOp_add_Eb_Gb)
737{
738 IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
739 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_add_u8);
740 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);
741}
742
743
744/**
745 * @opcode 0x01
746 * @opgroup og_gen_arith_bin
747 * @opflmodify cf,pf,af,zf,sf,of
748 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
749 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
750 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
751 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
752 */
753FNIEMOP_DEF(iemOp_add_Ev_Gv)
754{
755 IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
756 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
757 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
758}
759
760
761/**
762 * @opcode 0x02
763 * @opgroup og_gen_arith_bin
764 * @opflmodify cf,pf,af,zf,sf,of
765 * @opcopytests iemOp_add_Eb_Gb
766 */
767FNIEMOP_DEF(iemOp_add_Gb_Eb)
768{
769 IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
770 IEMOP_BODY_BINARY_r8_rm(iemAImpl_add_u8);
771}
772
773
774/**
775 * @opcode 0x03
776 * @opgroup og_gen_arith_bin
777 * @opflmodify cf,pf,af,zf,sf,of
778 * @opcopytests iemOp_add_Ev_Gv
779 */
780FNIEMOP_DEF(iemOp_add_Gv_Ev)
781{
782 IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
783 IEMOP_BODY_BINARY_rv_rm(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1, 0);
784}
785
786
787/**
788 * @opcode 0x04
789 * @opgroup og_gen_arith_bin
790 * @opflmodify cf,pf,af,zf,sf,of
791 * @opcopytests iemOp_add_Eb_Gb
792 */
793FNIEMOP_DEF(iemOp_add_Al_Ib)
794{
795 IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
796 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_add_u8);
797}
798
799
800/**
801 * @opcode 0x05
802 * @opgroup og_gen_arith_bin
803 * @opflmodify cf,pf,af,zf,sf,of
804 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
805 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
806 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
807 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
808 */
809FNIEMOP_DEF(iemOp_add_eAX_Iz)
810{
811 IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
812 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64, 1);
813}
814
815
816/**
817 * @opcode 0x06
818 * @opgroup og_stack_sreg
819 */
820FNIEMOP_DEF(iemOp_push_ES)
821{
822 IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
823 IEMOP_HLP_NO_64BIT();
824 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
825}
826
827
828/**
829 * @opcode 0x07
830 * @opgroup og_stack_sreg
831 */
832FNIEMOP_DEF(iemOp_pop_ES)
833{
834 IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
835 IEMOP_HLP_NO_64BIT();
836 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
837 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
838 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
839 | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
840 | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
841 | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES),
842 iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
843}
844
845
846/**
847 * @opcode 0x08
848 * @opgroup og_gen_arith_bin
849 * @opflmodify cf,pf,af,zf,sf,of
850 * @opflundef af
851 * @opflclear of,cf
852 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
853 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
854 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
855 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
856 */
857FNIEMOP_DEF(iemOp_or_Eb_Gb)
858{
859 IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
860 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
861 IEMOP_BODY_BINARY_rm_r8_RW( iemAImpl_or_u8);
862 IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_or_u8_locked);
863}
864
865
866/*
867 * @opcode 0x09
868 * @opgroup og_gen_arith_bin
869 * @opflmodify cf,pf,af,zf,sf,of
870 * @opflundef af
871 * @opflclear of,cf
872 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
873 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
874 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
875 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
876 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
877 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
878 */
879FNIEMOP_DEF(iemOp_or_Ev_Gv)
880{
881 IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
882 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
883 IEMOP_BODY_BINARY_rm_rv_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
884 IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
885}
886
887
888/**
889 * @opcode 0x0a
890 * @opgroup og_gen_arith_bin
891 * @opflmodify cf,pf,af,zf,sf,of
892 * @opflundef af
893 * @opflclear of,cf
894 * @opcopytests iemOp_or_Eb_Gb
895 */
896FNIEMOP_DEF(iemOp_or_Gb_Eb)
897{
898 IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
899 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
900 IEMOP_BODY_BINARY_r8_rm(iemAImpl_or_u8);
901}
902
903
904/**
905 * @opcode 0x0b
906 * @opgroup og_gen_arith_bin
907 * @opflmodify cf,pf,af,zf,sf,of
908 * @opflundef af
909 * @opflclear of,cf
910 * @opcopytests iemOp_or_Ev_Gv
911 */
912FNIEMOP_DEF(iemOp_or_Gv_Ev)
913{
914 IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
915 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
916 IEMOP_BODY_BINARY_rv_rm(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1, 0);
917}
918
919
920/**
921 * @opcode 0x0c
922 * @opgroup og_gen_arith_bin
923 * @opflmodify cf,pf,af,zf,sf,of
924 * @opflundef af
925 * @opflclear of,cf
926 * @opcopytests iemOp_or_Eb_Gb
927 */
928FNIEMOP_DEF(iemOp_or_Al_Ib)
929{
930 IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
931 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
932 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_or_u8);
933}
934
935
936/**
937 * @opcode 0x0d
938 * @opgroup og_gen_arith_bin
939 * @opflmodify cf,pf,af,zf,sf,of
940 * @opflundef af
941 * @opflclear of,cf
942 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
943 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
944 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
945 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
946 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
947 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
948 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
949 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by OR. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64, 1); /* AX/EAX/RAX dst, Iz immediate */
}
956
957
958/**
959 * @opcode 0x0e
960 * @opgroup og_stack_sreg
961 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* 0x0e is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
968
969
970/**
971 * @opcode 0x0f
972 * @opmnemonic EscTwo0f
973 * @openc two0f
974 * @opdisenum OP_2B_ESC
975 * @ophints harmless
976 * @opgroup og_escapes
977 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#if 0 /// @todo def VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        /* The two-byte table has 4 entries per opcode, selected by the prefix index. */
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_X86_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /** @todo eliminate END_TB here */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_END_TB,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_CS),
                                iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}
1015
1016/**
1017 * @opcode 0x10
1018 * @opgroup og_gen_arith_bin
1019 * @opfltest cf
1020 * @opflmodify cf,pf,af,zf,sf,of
1021 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1022 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1023 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1024 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1025 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1026 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Plain r/m destination body, then the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_adc_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_adc_u8_locked);
}
1033
1034
1035/**
1036 * @opcode 0x11
1037 * @opgroup og_gen_arith_bin
1038 * @opfltest cf
1039 * @opflmodify cf,pf,af,zf,sf,of
1040 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
1041 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
1042 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
1043 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
1044 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
1045 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Plain r/m destination body, then the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
1052
1053
1054/**
1055 * @opcode 0x12
1056 * @opgroup og_gen_arith_bin
1057 * @opfltest cf
1058 * @opflmodify cf,pf,af,zf,sf,of
1059 * @opcopytests iemOp_adc_Eb_Gb
1060 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_adc_u8); /* register destination - no locked body needed */
}
1066
1067
1068/**
1069 * @opcode 0x13
1070 * @opgroup og_gen_arith_bin
1071 * @opfltest cf
1072 * @opflmodify cf,pf,af,zf,sf,of
1073 * @opcopytests iemOp_adc_Ev_Gv
1074 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1, 0); /* register destination */
}
1080
1081
1082/**
1083 * @opcode 0x14
1084 * @opgroup og_gen_arith_bin
1085 * @opfltest cf
1086 * @opflmodify cf,pf,af,zf,sf,of
1087 * @opcopytests iemOp_adc_Eb_Gb
1088 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_adc_u8); /* fixed AL destination, 8-bit immediate */
}
1094
1095
1096/**
1097 * @opcode 0x15
1098 * @opgroup og_gen_arith_bin
1099 * @opfltest cf
1100 * @opflmodify cf,pf,af,zf,sf,of
1101 * @opcopytests iemOp_adc_Ev_Gv
1102 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64, 1); /* AX/EAX/RAX dst, Iz immediate */
}
1108
1109
1110/**
1111 * @opcode 0x16
1112 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT(); /* 0x16 is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
1119
1120
1121/**
1122 * @opcode 0x17
1123 * @opgroup og_gen_arith_bin
1124 * @opfltest cf
1125 * @opflmodify cf,pf,af,zf,sf,of
1126 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* 0x17 is invalid in 64-bit mode */
    /* IEM_CIMPL_F_INHIBIT_SHADOW: pop ss sets up interrupt shadowing for the next instruction. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_INHIBIT_SHADOW,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_SS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_SS),
                                iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
1139
1140
1141/**
1142 * @opcode 0x18
1143 * @opgroup og_gen_arith_bin
1144 * @opfltest cf
1145 * @opflmodify cf,pf,af,zf,sf,of
1146 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Plain r/m destination body, then the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sbb_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sbb_u8_locked);
}
1153
1154
1155/**
1156 * @opcode 0x19
1157 * @opgroup og_gen_arith_bin
1158 * @opfltest cf
1159 * @opflmodify cf,pf,af,zf,sf,of
1160 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Plain r/m destination body, then the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
1167
1168
1169/**
1170 * @opcode 0x1a
1171 * @opgroup og_gen_arith_bin
1172 * @opfltest cf
1173 * @opflmodify cf,pf,af,zf,sf,of
1174 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sbb_u8); /* register destination - no locked body needed */
}
1180
1181
1182/**
1183 * @opcode 0x1b
1184 * @opgroup og_gen_arith_bin
1185 * @opfltest cf
1186 * @opflmodify cf,pf,af,zf,sf,of
1187 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1, 0); /* register destination */
}
1193
1194
1195/**
1196 * @opcode 0x1c
1197 * @opgroup og_gen_arith_bin
1198 * @opfltest cf
1199 * @opflmodify cf,pf,af,zf,sf,of
1200 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sbb_u8); /* fixed AL destination, 8-bit immediate */
}
1206
1207
1208/**
1209 * @opcode 0x1d
1210 * @opgroup og_gen_arith_bin
1211 * @opfltest cf
1212 * @opflmodify cf,pf,af,zf,sf,of
1213 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64, 1); /* AX/EAX/RAX dst, Iz immediate */
}
1219
1220
1221/**
1222 * @opcode 0x1e
1223 * @opgroup og_stack_sreg
1224 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT(); /* 0x1e is invalid in 64-bit mode */
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
1231
1232
1233/**
1234 * @opcode 0x1f
1235 * @opgroup og_stack_sreg
1236 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* 0x1f is invalid in 64-bit mode */
    /* Unlike pop ss, no interrupt shadow is needed; only the mode flag. */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS),
                                iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
1249
1250
1251/**
1252 * @opcode 0x20
1253 * @opgroup og_gen_arith_bin
1254 * @opflmodify cf,pf,af,zf,sf,of
1255 * @opflundef af
1256 * @opflclear of,cf
1257 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    /* Plain r/m destination body, then the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_and_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_and_u8_locked);
}
1265
1266
1267/**
1268 * @opcode 0x21
1269 * @opgroup og_gen_arith_bin
1270 * @opflmodify cf,pf,af,zf,sf,of
1271 * @opflundef af
1272 * @opflclear of,cf
1273 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    /* Plain r/m destination body, then the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
1281
1282
1283/**
1284 * @opcode 0x22
1285 * @opgroup og_gen_arith_bin
1286 * @opflmodify cf,pf,af,zf,sf,of
1287 * @opflundef af
1288 * @opflclear of,cf
1289 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_and_u8); /* register destination - no locked body needed */
}
1296
1297
1298/**
1299 * @opcode 0x23
1300 * @opgroup og_gen_arith_bin
1301 * @opflmodify cf,pf,af,zf,sf,of
1302 * @opflundef af
1303 * @opflclear of,cf
1304 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1, 0); /* register destination */
}
1311
1312
1313/**
1314 * @opcode 0x24
1315 * @opgroup og_gen_arith_bin
1316 * @opflmodify cf,pf,af,zf,sf,of
1317 * @opflundef af
1318 * @opflclear of,cf
1319 */
1320FNIEMOP_DEF(iemOp_and_Al_Ib)
1321{
1322 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
1323 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1324 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_and_u8);
1325}
1326
1327
1328/**
1329 * @opcode 0x25
1330 * @opgroup og_gen_arith_bin
1331 * @opflmodify cf,pf,af,zf,sf,of
1332 * @opflundef af
1333 * @opflclear of,cf
1334 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by AND. */
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64, 1); /* AX/EAX/RAX dst, Iz immediate */
}
1341
1342
1343/**
1344 * @opcode 0x26
1345 * @opmnemonic SEG
1346 * @op1 ES
1347 * @opgroup og_prefix
1348 * @openc prefix
1349 * @opdisenum OP_SEG
1350 * @ophints harmless
1351 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    /* Record the prefix and make ES the effective segment, then decode the
       instruction that follows with the prefix in effect. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1361
1362
1363/**
1364 * @opcode 0x27
1365 * @opfltest af,cf
1366 * @opflmodify cf,pf,af,zf,sf,of
1367 * @opflundef of
1368 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* 0x27 is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is left undefined by DAA. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_daa);
}
1377
1378
1379/**
1380 * @opcode 0x28
1381 * @opgroup og_gen_arith_bin
1382 * @opflmodify cf,pf,af,zf,sf,of
1383 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    /* Plain r/m destination body, then the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_sub_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_sub_u8_locked);
}
1390
1391
1392/**
1393 * @opcode 0x29
1394 * @opgroup og_gen_arith_bin
1395 * @opflmodify cf,pf,af,zf,sf,of
1396 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    /* Plain r/m destination body, then the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
1403
1404
1405/**
1406 * @opcode 0x2a
1407 * @opgroup og_gen_arith_bin
1408 * @opflmodify cf,pf,af,zf,sf,of
1409 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_sub_u8); /* register destination - no locked body needed */
}
1415
1416
1417/**
1418 * @opcode 0x2b
1419 * @opgroup og_gen_arith_bin
1420 * @opflmodify cf,pf,af,zf,sf,of
1421 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1, 0); /* register destination */
}
1427
1428
1429/**
1430 * @opcode 0x2c
1431 * @opgroup og_gen_arith_bin
1432 * @opflmodify cf,pf,af,zf,sf,of
1433 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_sub_u8); /* fixed AL destination, 8-bit immediate */
}
1439
1440
1441/**
1442 * @opcode 0x2d
1443 * @opgroup og_gen_arith_bin
1444 * @opflmodify cf,pf,af,zf,sf,of
1445 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64, 1); /* AX/EAX/RAX dst, Iz immediate */
}
1451
1452
1453/**
1454 * @opcode 0x2e
1455 * @opmnemonic SEG
1456 * @op1 CS
1457 * @opgroup og_prefix
1458 * @openc prefix
1459 * @opdisenum OP_SEG
1460 * @ophints harmless
1461 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    /* Record the prefix and make CS the effective segment, then decode the
       instruction that follows with the prefix in effect. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1471
1472
1473/**
1474 * @opcode 0x2f
1475 * @opfltest af,cf
1476 * @opflmodify cf,pf,af,zf,sf,of
1477 * @opflundef of
1478 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT(); /* 0x2f is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is left undefined by DAS. */
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_das);
}
1487
1488
1489/**
1490 * @opcode 0x30
1491 * @opgroup og_gen_arith_bin
1492 * @opflmodify cf,pf,af,zf,sf,of
1493 * @opflundef af
1494 * @opflclear of,cf
1495 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    /* Plain r/m destination body, then the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_rm_r8_RW(    iemAImpl_xor_u8);
    IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_xor_u8_locked);
}
1503
1504
1505/**
1506 * @opcode 0x31
1507 * @opgroup og_gen_arith_bin
1508 * @opflmodify cf,pf,af,zf,sf,of
1509 * @opflundef af
1510 * @opflclear of,cf
1511 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    /* Plain r/m destination body, then the LOCK-prefixed (atomic) memory form. */
    IEMOP_BODY_BINARY_rm_rv_RW(    iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_rm_rv_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
1519
1520
1521/**
1522 * @opcode 0x32
1523 * @opgroup og_gen_arith_bin
1524 * @opflmodify cf,pf,af,zf,sf,of
1525 * @opflundef af
1526 * @opflclear of,cf
1527 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_xor_u8); /* register destination - no locked body needed */
}
1534
1535
1536/**
1537 * @opcode 0x33
1538 * @opgroup og_gen_arith_bin
1539 * @opflmodify cf,pf,af,zf,sf,of
1540 * @opflundef af
1541 * @opflclear of,cf
1542 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1, 0); /* register destination */
}
1549
1550
1551/**
1552 * @opcode 0x34
1553 * @opgroup og_gen_arith_bin
1554 * @opflmodify cf,pf,af,zf,sf,of
1555 * @opflundef af
1556 * @opflclear of,cf
1557 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is left undefined by XOR. */
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_xor_u8); /* fixed AL destination, 8-bit immediate */
}
1564
1565
1566/**
1567 * @opcode 0x35
1568 * @opgroup og_gen_arith_bin
1569 * @opflmodify cf,pf,af,zf,sf,of
1570 * @opflundef af
1571 * @opflclear of,cf
1572 */
1573FNIEMOP_DEF(iemOp_xor_eAX_Iz)
1574{
1575 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1576 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
1577 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64, 1);
1578}
1579
1580
1581/**
1582 * @opcode 0x36
1583 * @opmnemonic SEG
1584 * @op1 SS
1585 * @opgroup og_prefix
1586 * @openc prefix
1587 * @opdisenum OP_SEG
1588 * @ophints harmless
1589 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    /* Record the prefix and make SS the effective segment, then decode the
       instruction that follows with the prefix in effect. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1599
1600
1601/**
1602 * @opcode 0x37
1603 * @opfltest af,cf
1604 * @opflmodify cf,pf,af,zf,sf,of
1605 * @opflundef pf,zf,sf,of
1606 * @opgroup og_gen_arith_dec
1607 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
1608 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
1609 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1610 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1611 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1612 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
1613 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
1614 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
1615 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
1616 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
1617 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
1618 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
1619 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
1620 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
1621 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
1622 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1623 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1624 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
1625 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
1626 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
1627 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
1628 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
1629 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
1630 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
1631 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
1632 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1633 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
1634 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
1635 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
1636 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
1637 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
1638 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT(); /* 0x37 is invalid in 64-bit mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is left undefined by AAA. */

    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aaa);
}
1648
1649
1650/**
1651 * @opcode 0x38
1652 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    /* CMP only reads its destination (RO body); the LOCK prefix is rejected. */
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_cmp_u8);
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
1659
1660
1661/**
1662 * @opcode 0x39
1663 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64); /* CMP only reads its destination. */
}
1669
1670
1671/**
1672 * @opcode 0x3a
1673 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    IEMOP_BODY_BINARY_r8_rm(iemAImpl_cmp_u8); /* register destination */
}
1679
1680
1681/**
1682 * @opcode 0x3b
1683 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    IEMOP_BODY_BINARY_rv_rm(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0, 0); /* 0 = no register write-back */
}
1689
1690
1691/**
1692 * @opcode 0x3c
1693 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    IEMOP_BODY_BINARY_AL_Ib(iemAImpl_cmp_u8); /* fixed AL operand, 8-bit immediate */
}
1699
1700
1701/**
1702 * @opcode 0x3d
1703 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64, 0); /* 0 = no register write-back */
}
1709
1710
1711/**
1712 * @opcode 0x3e
1713 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    /* Record the prefix and make DS the effective segment, then decode the
       instruction that follows with the prefix in effect. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1723
1724
1725/**
1726 * @opcode 0x3f
1727 * @opfltest af,cf
1728 * @opflmodify cf,pf,af,zf,sf,of
1729 * @opflundef pf,zf,sf,of
1730 * @opgroup og_gen_arith_dec
1731 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1732 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1733 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1734 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1735 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1736 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1737 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1738 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1739 * @optest amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1740 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1741 * @optest amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1742 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1743 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1744 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1745 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1746 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1747 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1748 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1749 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1750 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1751 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1752 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1753 * @optest amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1754 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1755 * @optest amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1756 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1757 * @optest amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1758 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1759 * @optest amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1760 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1761 * @optest amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1762 * @optest intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1763 * @optest amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1764 * @optest intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1765 * @optest amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1766 */
1767FNIEMOP_DEF(iemOp_aas)
1768{
1769 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_X86_INVALID_64, 0); /* express implicit AL/AX register use */
1770 IEMOP_HLP_NO_64BIT();
1771 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1772 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1773
1774 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aas);
1775}
1776
1777
1778/**
1779 * Common 'inc/dec register' helper.
1780 *
1781 * Not for 64-bit code, only for what became the rex prefixes.
1782 */
1783#define IEMOP_BODY_UNARY_GReg(a_fnNormalU16, a_fnNormalU32, a_iReg) \
1784 switch (pVCpu->iem.s.enmEffOpSize) \
1785 { \
1786 case IEMMODE_16BIT: \
1787 IEM_MC_BEGIN(2, 0, IEM_MC_F_NOT_64BIT, 0); \
1788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1789 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
1790 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1791 IEM_MC_REF_GREG_U16(pu16Dst, a_iReg); \
1792 IEM_MC_REF_EFLAGS(pEFlags); \
1793 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
1794 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1795 IEM_MC_END(); \
1796 break; \
1797 \
1798 case IEMMODE_32BIT: \
1799 IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0); \
1800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
1801 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
1802 IEM_MC_ARG(uint32_t *, pEFlags, 1); \
1803 IEM_MC_REF_GREG_U32(pu32Dst, a_iReg); \
1804 IEM_MC_REF_EFLAGS(pEFlags); \
1805 IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
1806 IEM_MC_CLEAR_HIGH_GREG_U64(a_iReg); \
1807 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
1808 IEM_MC_END(); \
1809 break; \
1810 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
1811 } \
1812 (void)0
1813
1814/**
1815 * @opcode 0x40
1816 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]); /* decode the real opcode with REX in effect */
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xAX);
}
1834
1835
1836/**
1837 * @opcode 0x41
1838 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B supplies bit 3 of the r/m/base register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xCX);
}
1857
1858
1859/**
1860 * @opcode 0x42
1861 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X supplies bit 3 of the SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDX);
}
1880
1881
1882
1883/**
1884 * @opcode 0x43
1885 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B + REX.X set together for this prefix byte */
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBX);
}
1905
1906
1907/**
1908 * @opcode 0x44
1909 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R supplies bit 3 of the ModR/M reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSP);
}
1928
1929
1930/**
1931 * @opcode 0x45
1932 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R + REX.B set together for this prefix byte */
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xBP);
}
1952
1953
/**
 * @opcode 0x46
 * @note In 64-bit mode this byte is the REX.RX prefix, not INC eSI.
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends the ModRM reg field. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index register field. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xSI);
}
1976
1977
/**
 * @opcode 0x47
 * @note In 64-bit mode this byte is the REX.RBX prefix, not INC eDI.
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends the ModRM reg field. */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B extends the ModRM r/m / SIB base / opcode reg field. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index register field. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_inc_u16, iemAImpl_inc_u32, X86_GREG_xDI);
}
2001
2002
/**
 * @opcode 0x48
 * @note In 64-bit mode this byte is the REX.W prefix, not DEC eAX.
 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xAX);
}
2024
2025
/**
 * @opcode 0x49
 * @note In 64-bit mode this byte is the REX.BW prefix, not DEC eCX.
 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3; /* REX.B extends the ModRM r/m / SIB base / opcode reg field. */
        iemRecalEffOpSize(pVCpu);    /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xCX);
}
2048
2049
/**
 * @opcode 0x4a
 * @note In 64-bit mode this byte is the REX.XW prefix, not DEC eDX.
 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index register field. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDX);
}
2072
2073
/**
 * @opcode 0x4b
 * @note In 64-bit mode this byte is the REX.BXW prefix, not DEC eBX.
 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B extends the ModRM r/m / SIB base / opcode reg field. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index register field. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBX);
}
2097
2098
/**
 * @opcode 0x4c
 * @note In 64-bit mode this byte is the REX.RW prefix, not DEC eSP.
 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R extends the ModRM reg field. */
        iemRecalEffOpSize(pVCpu);      /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSP);
}
2121
2122
/**
 * @opcode 0x4d
 * @note In 64-bit mode this byte is the REX.RBW prefix, not DEC eBP.
 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3; /* REX.R extends the ModRM reg field. */
        pVCpu->iem.s.uRexB   = 1 << 3; /* REX.B extends the ModRM r/m / SIB base / opcode reg field. */
        iemRecalEffOpSize(pVCpu);      /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xBP);
}
2146
2147
/**
 * @opcode 0x4e
 * @note In 64-bit mode this byte is the REX.RXW prefix, not DEC eSI.
 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends the ModRM reg field. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index register field. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xSI);
}
2171
2172
/**
 * @opcode 0x4f
 * @note In 64-bit mode this byte is the REX.RBXW prefix (all extension bits
 *       plus 64-bit operand size), not DEC eDI.
 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3; /* REX.R extends the ModRM reg field. */
        pVCpu->iem.s.uRexB     = 1 << 3; /* REX.B extends the ModRM r/m / SIB base / opcode reg field. */
        pVCpu->iem.s.uRexIndex = 1 << 3; /* REX.X extends the SIB index register field. */
        iemRecalEffOpSize(pVCpu);        /* REX.W changes the effective operand size. */

        /* Restart decoding with the byte following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    IEMOP_BODY_UNARY_GReg(iemAImpl_dec_u16, iemAImpl_dec_u32, X86_GREG_xDI);
}
2197
2198
/**
 * Common 'push register' helper.
 *
 * Pushes the general register @a iReg (0x50..0x57 opcode reg field) onto the
 * stack using the current effective operand size.  In 64-bit mode REX.B is
 * folded into the register index and the default operand size is forced to
 * 64-bit (0x66 selects 16-bit; there is no 32-bit push in long mode).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B selects r8..r15. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT: /* Not reachable in 64-bit mode, see above. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2246
2247
/**
 * @opcode 0x50
 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Defers to the common push worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
2256
2257
/**
 * @opcode 0x51
 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Defers to the common push worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
2266
2267
/**
 * @opcode 0x52
 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Defers to the common push worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
2276
2277
/**
 * @opcode 0x53
 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Defers to the common push worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
2286
2287
/**
 * @opcode 0x54
 * @note The 8086/8088 pushes the value of SP *after* the decrement; later
 *       CPUs push the pre-decrement value.  The 8086 special case below
 *       replicates the old behaviour by pushing SP - 2.
 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1, IEM_MC_F_ONLY_8086, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2); /* 8086 quirk: push the decremented SP. */
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();   /* The MC block returns here, so we don't fall through below. */
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
2307
2308
/**
 * @opcode 0x55
 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Defers to the common push worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
2317
2318
/**
 * @opcode 0x56
 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Defers to the common push worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
2327
2328
/**
 * @opcode 0x57
 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Defers to the common push worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
2337
2338
/**
 * Common 'pop register' helper.
 *
 * Pops the top of stack into the general register @a iReg (0x58..0x5f opcode
 * reg field), using the current effective operand size.  In 64-bit mode REX.B
 * is folded into the register index and the default operand size is forced to
 * 64-bit (0x66 selects 16-bit; there is no 32-bit pop in long mode).
 *
 * @note POP [r/e]SP has extra quirks and is handled separately in
 *       iemOp_pop_eSP, not via this helper (except with REX.B -> r12).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        iReg |= pVCpu->iem.s.uRexB; /* REX.B selects r8..r15. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst); /* Pops straight into the register. */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT: /* Not reachable in 64-bit mode, see above. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64(iReg); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2387
2388
/**
 * @opcode 0x58
 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Defers to the common pop worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
2397
2398
/**
 * @opcode 0x59
 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Defers to the common pop worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
2407
2408
/**
 * @opcode 0x5a
 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Defers to the common pop worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
2417
2418
/**
 * @opcode 0x5b
 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Defers to the common pop worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
2427
2428
/**
 * @opcode 0x5c
 *
 * POP [r/e]SP needs special treatment: the popped value must be read before
 * the final SP update, so the value is staged in a local and only stored to
 * xSP afterwards.  With REX.B set this is POP r12 and the common worker is
 * used instead.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (IEM_IS_64BIT_CODE(pVCpu))
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP); /* REX.B: actually POP r12. */
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_LOCAL(pu16Dst, u16Dst);
            IEM_MC_POP_U16(pu16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst); /* Store last, after the pop adjusted SP. */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_LOCAL(pu32Dst, u32Dst);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst); /* Store last, after the pop adjusted ESP. */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                                   DISOPTYPE_HARMLESS | DISOPTYPE_X86_DEFAULT_64_OP_SIZE | DISOPTYPE_X86_REXB_EXTENDS_OPREG);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_LOCAL(pu64Dst, u64Dst);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst); /* Store last, after the pop adjusted RSP. */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2488
2489
/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Defers to the common pop worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
2498
2499
/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Defers to the common pop worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
2508
2509
/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Defers to the common pop worker; REX.B handling happens there. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
2518
2519
/**
 * @opcode 0x60
 *
 * PUSHA/PUSHAD - pushes all eight general registers.  Requires a 186+ and is
 * invalid (reused as a prefix) in 64-bit mode.  Deferred to a C implementation;
 * only xSP is modified by the instruction itself.
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    IEM_MC_DEFER_TO_CIMPL_0_RET(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pusha_32);
}
2533
2534
/**
 * @opcode 0x61
 *
 * POPA/POPAD in legacy/compat modes - pops all eight general registers (the
 * written-back SP value is discarded per architecture, but all GPRs are listed
 * as modified for the recompiler).  In 64-bit mode 0x61 is the MVEX prefix
 * (Knights Corner), which is not supported and raises \#UD here.
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                        | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                        iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        IEM_MC_DEFER_TO_CIMPL_0_RET(0,
                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBX)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
                                    iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
2572
2573
2574/**
2575 * @opcode 0x62
2576 * @opmnemonic bound
2577 * @op1 Gv_RO
2578 * @op2 Ma
2579 * @opmincpu 80186
2580 * @ophints harmless x86_invalid_64
2581 * @optest op1=0 op2=0 ->
2582 * @optest op1=1 op2=0 -> value.xcpt=5
2583 * @optest o16 / op1=0xffff op2=0x0000fffe ->
2584 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
2585 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
2586 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
2587 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
2588 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
2589 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
2590 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
2591 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
2592 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
2593 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
2594 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
2595 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
2596 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
2597 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
2598 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
2599 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
2600 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
2601 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
2602 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
2603 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
2604 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
2605 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
2606 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
2607 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
2608 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
2609 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
2610 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
2611 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
2612 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
2613 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
2614 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
2615 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
2616 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
2617 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
2618 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
2619 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
2620 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
2621 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
2622 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
2623 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
2624 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
2625 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid in 64-bit mode.  In legacy and
       compatibility mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set.  In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X.  Thus, just
       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
    uint8_t bRm;
    if (!IEM_IS_64BIT_CODE(pVCpu))
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if (IEM_IS_MODRM_MEM_MODE(bRm))
        {
            /** @todo testcase: check that there are two memory accesses involved.  Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint16_t, u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at m16, upper bound at m16+2. */
                IEM_MC_FETCH_GREG_U16(u16Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
                IEM_MC_ARG(uint32_t, u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at m32, upper bound at m32+4. */
                IEM_MC_FETCH_GREG_U32(u32Index, IEM_GET_MODRM_REG_8(bRm));
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(0, 0, iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            IEMOP_RAISE_INVALID_OPCODE_RET();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* Shared EVEX-prefix tail: consume the remaining two payload bytes. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
2713
2714
/** Opcode 0x63 - non-64-bit modes.
 *
 * ARPL Ew,Gw - adjusts the RPL field of the destination selector up to the
 * RPL of the source selector, setting ZF accordingly.  Requires 286+ and
 * protected mode (no real/V86 mode).  In 64-bit mode 0x63 is MOVSXD instead,
 * see iemOp_movsxd_Gv_Ev. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* Register destination. */
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
        IEM_MC_ARG(uint16_t,        u16Src,  1);
        IEM_MC_ARG(uint32_t *,      pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM_8(bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination - read-modify-write mapping with EFLAGS commit. */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_286 | IEM_MC_F_NOT_64BIT, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG_8(bRm));
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
2763
2764
/**
 * @opcode 0x63
 *
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases.
 * @note The non-REX.W path (plain 32-bit move semantics) is not implemented
 *       here yet - it asserts with VERR_IEM_INSTR_NOT_IMPLEMENTED.
 */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
    {
        if (IEM_IS_MODRM_REG_MODE(bRm))
        {
            /*
             * Register to register: sign-extend Ev (32-bit) into Gv (64-bit).
             */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
        else
        {
            /*
             * We're loading a register from memory (sign-extending 32 -> 64 bits).
             */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
        }
    }
    else
        AssertFailedReturn(VERR_IEM_INSTR_NOT_IMPLEMENTED);
}
2812
2813
/**
 * @opcode 0x64
 * @opmnemonic segfs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * FS segment override prefix; records the prefix and restarts decoding.
 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_FS;

    /* Decode the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2831
2832
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 *
 * GS segment override prefix; records the prefix and restarts decoding.
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg    = X86_SREG_GS;

    /* Decode the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2850
2851
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Operand-size override prefix; records it, recalculates the effective
 * operand size and restarts decoding.
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix does not count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Decode the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2876
2877
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 *
 * Address-size override prefix; toggles the effective address mode relative
 * to the default (16<->32 in legacy/compat modes, 64->32 in long mode) and
 * restarts decoding.
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break; /* No 16-bit addressing in long mode. */
        default: AssertFailed();
    }

    /* Decode the instruction following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2903
2904
/**
 * @opcode 0x68
 *
 * PUSH Iz - push a sign-extended immediate.  186+; defaults to 64-bit operand
 * size in long mode (the 64-bit form sign-extends a 32-bit immediate).
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_186, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint16_t, u16Value, u16Imm);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint32_t, u32Value, u32Imm);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* Imm is 32-bit, sign-extended to 64. */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL_CONST(uint64_t, u64Value, u64Imm);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2948
2949
2950/**
2951 * @opcode 0x69
2952 */
2953FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2954{
2955 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2956 IEMOP_HLP_MIN_186();
2957 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2958 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2959
2960 switch (pVCpu->iem.s.enmEffOpSize)
2961 {
2962 case IEMMODE_16BIT:
2963 {
2964 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
2965 if (IEM_IS_MODRM_REG_MODE(bRm))
2966 {
2967 /* register operand */
2968 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2969 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
2970 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2971 IEM_MC_LOCAL(uint16_t, u16Tmp);
2972 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
2973 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2974 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm, 1);
2975 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2976 IEM_MC_REF_EFLAGS(pEFlags);
2977 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
2978 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
2979
2980 IEM_MC_ADVANCE_RIP_AND_FINISH();
2981 IEM_MC_END();
2982 }
2983 else
2984 {
2985 /* memory operand */
2986 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
2987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2988 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2989
2990 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2991 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2992
2993 IEM_MC_LOCAL(uint16_t, u16Tmp);
2994 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2995
2996 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
2997 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
2998 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2999 IEM_MC_REF_EFLAGS(pEFlags);
3000 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3001 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3002
3003 IEM_MC_ADVANCE_RIP_AND_FINISH();
3004 IEM_MC_END();
3005 }
3006 break;
3007 }
3008
3009 case IEMMODE_32BIT:
3010 {
3011 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3012 if (IEM_IS_MODRM_REG_MODE(bRm))
3013 {
3014 /* register operand */
3015 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3016 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3017 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3018 IEM_MC_LOCAL(uint32_t, u32Tmp);
3019 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3020
3021 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3022 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm, 1);
3023 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3024 IEM_MC_REF_EFLAGS(pEFlags);
3025 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3026 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3027
3028 IEM_MC_ADVANCE_RIP_AND_FINISH();
3029 IEM_MC_END();
3030 }
3031 else
3032 {
3033 /* memory operand */
3034 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3035 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3037
3038 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3039 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3040
3041 IEM_MC_LOCAL(uint32_t, u32Tmp);
3042 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3043
3044 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3045 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3046 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3047 IEM_MC_REF_EFLAGS(pEFlags);
3048 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3049 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3050
3051 IEM_MC_ADVANCE_RIP_AND_FINISH();
3052 IEM_MC_END();
3053 }
3054 break;
3055 }
3056
3057 case IEMMODE_64BIT:
3058 {
3059 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3060 if (IEM_IS_MODRM_REG_MODE(bRm))
3061 {
3062 /* register operand */
3063 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3064 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3066 IEM_MC_LOCAL(uint64_t, u64Tmp);
3067 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3068
3069 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3070 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm, 1);
3071 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3072 IEM_MC_REF_EFLAGS(pEFlags);
3073 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3074 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3075
3076 IEM_MC_ADVANCE_RIP_AND_FINISH();
3077 IEM_MC_END();
3078 }
3079 else
3080 {
3081 /* memory operand */
3082 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3083 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3084 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3085
3086 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); /* Not using IEM_OPCODE_GET_NEXT_S32_SX_U64 to reduce the */
3087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* parameter count for the threaded function for this block. */
3088
3089 IEM_MC_LOCAL(uint64_t, u64Tmp);
3090 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3091
3092 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3093 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int32_t)u32Imm, 1);
3094 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3095 IEM_MC_REF_EFLAGS(pEFlags);
3096 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3097 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3098
3099 IEM_MC_ADVANCE_RIP_AND_FINISH();
3100 IEM_MC_END();
3101 }
3102 break;
3103 }
3104
3105 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3106 }
3107}
3108
3109
3110/**
3111 * @opcode 0x6a
3112 */
3113FNIEMOP_DEF(iemOp_push_Ib)
3114{
3115 IEMOP_MNEMONIC(push_Ib, "push Ib");
3116 IEMOP_HLP_MIN_186();
3117 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3118 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3119
3120 switch (pVCpu->iem.s.enmEffOpSize)
3121 {
3122 case IEMMODE_16BIT:
3123 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_186, 0);
3124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3125 IEM_MC_LOCAL_CONST(uint16_t, uValue, (int16_t)i8Imm);
3126 IEM_MC_PUSH_U16(uValue);
3127 IEM_MC_ADVANCE_RIP_AND_FINISH();
3128 IEM_MC_END();
3129 break;
3130 case IEMMODE_32BIT:
3131 IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
3132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3133 IEM_MC_LOCAL_CONST(uint32_t, uValue, (int32_t)i8Imm);
3134 IEM_MC_PUSH_U32(uValue);
3135 IEM_MC_ADVANCE_RIP_AND_FINISH();
3136 IEM_MC_END();
3137 break;
3138 case IEMMODE_64BIT:
3139 IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
3140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3141 IEM_MC_LOCAL_CONST(uint64_t, uValue, (int64_t)i8Imm);
3142 IEM_MC_PUSH_U64(uValue);
3143 IEM_MC_ADVANCE_RIP_AND_FINISH();
3144 IEM_MC_END();
3145 break;
3146 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3147 }
3148}
3149
3150
3151/**
3152 * @opcode 0x6b
3153 */
3154FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
3155{
3156 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
3157 IEMOP_HLP_MIN_186();
3158 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3159 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
3160
3161 switch (pVCpu->iem.s.enmEffOpSize)
3162 {
3163 case IEMMODE_16BIT:
3164 {
3165 PFNIEMAIMPLBINU16 const pfnAImplU16 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags);
3166 if (IEM_IS_MODRM_REG_MODE(bRm))
3167 {
3168 /* register operand */
3169 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_186, 0);
3170 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3171 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3172
3173 IEM_MC_LOCAL(uint16_t, u16Tmp);
3174 IEM_MC_FETCH_GREG_U16(u16Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3175
3176 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3177 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
3178 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3179 IEM_MC_REF_EFLAGS(pEFlags);
3180 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3181 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3182
3183 IEM_MC_ADVANCE_RIP_AND_FINISH();
3184 IEM_MC_END();
3185 }
3186 else
3187 {
3188 /* memory operand */
3189 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_186, 0);
3190
3191 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3193
3194 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
3195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3196
3197 IEM_MC_LOCAL(uint16_t, u16Tmp);
3198 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3199
3200 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Dst, u16Tmp, 0);
3201 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
3202 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3203 IEM_MC_REF_EFLAGS(pEFlags);
3204 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU16, pu16Dst, u16Src, pEFlags);
3205 IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Tmp);
3206
3207 IEM_MC_ADVANCE_RIP_AND_FINISH();
3208 IEM_MC_END();
3209 }
3210 break;
3211 }
3212
3213 case IEMMODE_32BIT:
3214 {
3215 PFNIEMAIMPLBINU32 const pfnAImplU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags);
3216 if (IEM_IS_MODRM_REG_MODE(bRm))
3217 {
3218 /* register operand */
3219 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3220 IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0);
3221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3222 IEM_MC_LOCAL(uint32_t, u32Tmp);
3223 IEM_MC_FETCH_GREG_U32(u32Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3224
3225 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3226 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
3227 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3228 IEM_MC_REF_EFLAGS(pEFlags);
3229 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3230 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3231
3232 IEM_MC_ADVANCE_RIP_AND_FINISH();
3233 IEM_MC_END();
3234 }
3235 else
3236 {
3237 /* memory operand */
3238 IEM_MC_BEGIN(3, 2, IEM_MC_F_MIN_386, 0);
3239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3241
3242 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
3243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3244
3245 IEM_MC_LOCAL(uint32_t, u32Tmp);
3246 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3247
3248 IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Dst, u32Tmp, 0);
3249 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
3250 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3251 IEM_MC_REF_EFLAGS(pEFlags);
3252 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU32, pu32Dst, u32Src, pEFlags);
3253 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Tmp);
3254
3255 IEM_MC_ADVANCE_RIP_AND_FINISH();
3256 IEM_MC_END();
3257 }
3258 break;
3259 }
3260
3261 case IEMMODE_64BIT:
3262 {
3263 PFNIEMAIMPLBINU64 const pfnAImplU64 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags);
3264 if (IEM_IS_MODRM_REG_MODE(bRm))
3265 {
3266 /* register operand */
3267 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3268 IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0);
3269 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3270 IEM_MC_LOCAL(uint64_t, u64Tmp);
3271 IEM_MC_FETCH_GREG_U64(u64Tmp, IEM_GET_MODRM_RM(pVCpu, bRm));
3272
3273 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3274 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3275 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3276 IEM_MC_REF_EFLAGS(pEFlags);
3277 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3278 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3279
3280 IEM_MC_ADVANCE_RIP_AND_FINISH();
3281 IEM_MC_END();
3282 }
3283 else
3284 {
3285 /* memory operand */
3286 IEM_MC_BEGIN(3, 2, IEM_MC_F_64BIT, 0);
3287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3288 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3289
3290 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); /* Not using IEM_OPCODE_GET_NEXT_S8_SX_U64 to reduce the threaded parameter count. */
3291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3292
3293 IEM_MC_LOCAL(uint64_t, u64Tmp);
3294 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3295
3296 IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Dst, u64Tmp, 0);
3297 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int64_t)(int8_t)u8Imm, 1);
3298 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3299 IEM_MC_REF_EFLAGS(pEFlags);
3300 IEM_MC_CALL_VOID_AIMPL_3(pfnAImplU64, pu64Dst, u64Src, pEFlags);
3301 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Tmp);
3302
3303 IEM_MC_ADVANCE_RIP_AND_FINISH();
3304 IEM_MC_END();
3305 }
3306 break;
3307 }
3308
3309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3310 }
3311}
3312
3313
3314/**
3315 * @opcode 0x6c
3316 */
3317FNIEMOP_DEF(iemOp_insb_Yb_DX)
3318{
3319 IEMOP_HLP_MIN_186();
3320 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3321 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3322 {
3323 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
3324 switch (pVCpu->iem.s.enmEffAddrMode)
3325 {
3326 case IEMMODE_16BIT:
3327 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3328 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3329 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3330 iemCImpl_rep_ins_op8_addr16, false);
3331 case IEMMODE_32BIT:
3332 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3333 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3334 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3335 iemCImpl_rep_ins_op8_addr32, false);
3336 case IEMMODE_64BIT:
3337 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3338 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3339 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3340 iemCImpl_rep_ins_op8_addr64, false);
3341 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3342 }
3343 }
3344 else
3345 {
3346 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
3347 switch (pVCpu->iem.s.enmEffAddrMode)
3348 {
3349 case IEMMODE_16BIT:
3350 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3351 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3352 iemCImpl_ins_op8_addr16, false);
3353 case IEMMODE_32BIT:
3354 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3355 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3356 iemCImpl_ins_op8_addr32, false);
3357 case IEMMODE_64BIT:
3358 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3359 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3360 iemCImpl_ins_op8_addr64, false);
3361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3362 }
3363 }
3364}
3365
3366
3367/**
3368 * @opcode 0x6d
3369 */
3370FNIEMOP_DEF(iemOp_inswd_Yv_DX)
3371{
3372 IEMOP_HLP_MIN_186();
3373 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3374 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3375 {
3376 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
3377 switch (pVCpu->iem.s.enmEffOpSize)
3378 {
3379 case IEMMODE_16BIT:
3380 switch (pVCpu->iem.s.enmEffAddrMode)
3381 {
3382 case IEMMODE_16BIT:
3383 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3384 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3385 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3386 iemCImpl_rep_ins_op16_addr16, false);
3387 case IEMMODE_32BIT:
3388 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3389 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3390 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3391 iemCImpl_rep_ins_op16_addr32, false);
3392 case IEMMODE_64BIT:
3393 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3394 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3395 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3396 iemCImpl_rep_ins_op16_addr64, false);
3397 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3398 }
3399 break;
3400 case IEMMODE_64BIT:
3401 case IEMMODE_32BIT:
3402 switch (pVCpu->iem.s.enmEffAddrMode)
3403 {
3404 case IEMMODE_16BIT:
3405 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3406 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3407 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3408 iemCImpl_rep_ins_op32_addr16, false);
3409 case IEMMODE_32BIT:
3410 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3411 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3412 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3413 iemCImpl_rep_ins_op32_addr32, false);
3414 case IEMMODE_64BIT:
3415 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3416 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
3417 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3418 iemCImpl_rep_ins_op32_addr64, false);
3419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3420 }
3421 break;
3422 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3423 }
3424 }
3425 else
3426 {
3427 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
3428 switch (pVCpu->iem.s.enmEffOpSize)
3429 {
3430 case IEMMODE_16BIT:
3431 switch (pVCpu->iem.s.enmEffAddrMode)
3432 {
3433 case IEMMODE_16BIT:
3434 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3435 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3436 iemCImpl_ins_op16_addr16, false);
3437 case IEMMODE_32BIT:
3438 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3439 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3440 iemCImpl_ins_op16_addr32, false);
3441 case IEMMODE_64BIT:
3442 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3443 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3444 iemCImpl_ins_op16_addr64, false);
3445 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3446 }
3447 break;
3448 case IEMMODE_64BIT:
3449 case IEMMODE_32BIT:
3450 switch (pVCpu->iem.s.enmEffAddrMode)
3451 {
3452 case IEMMODE_16BIT:
3453 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3454 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3455 iemCImpl_ins_op32_addr16, false);
3456 case IEMMODE_32BIT:
3457 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3458 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3459 iemCImpl_ins_op32_addr32, false);
3460 case IEMMODE_64BIT:
3461 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3462 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI),
3463 iemCImpl_ins_op32_addr64, false);
3464 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3465 }
3466 break;
3467 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3468 }
3469 }
3470}
3471
3472
3473/**
3474 * @opcode 0x6e
3475 */
3476FNIEMOP_DEF(iemOp_outsb_Yb_DX)
3477{
3478 IEMOP_HLP_MIN_186();
3479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3480 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
3481 {
3482 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
3483 switch (pVCpu->iem.s.enmEffAddrMode)
3484 {
3485 case IEMMODE_16BIT:
3486 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3487 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3488 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3489 iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3490 case IEMMODE_32BIT:
3491 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3492 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3493 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3494 iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3495 case IEMMODE_64BIT:
3496 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3497 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3498 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3499 iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3500 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3501 }
3502 }
3503 else
3504 {
3505 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
3506 switch (pVCpu->iem.s.enmEffAddrMode)
3507 {
3508 case IEMMODE_16BIT:
3509 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3510 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3511 iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
3512 case IEMMODE_32BIT:
3513 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3514 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3515 iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
3516 case IEMMODE_64BIT:
3517 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3518 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3519 iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
3520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3521 }
3522 }
3523}
3524
3525
3526/**
3527 * @opcode 0x6f
3528 */
3529FNIEMOP_DEF(iemOp_outswd_Yv_DX)
3530{
3531 IEMOP_HLP_MIN_186();
3532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3533 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
3534 {
3535 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
3536 switch (pVCpu->iem.s.enmEffOpSize)
3537 {
3538 case IEMMODE_16BIT:
3539 switch (pVCpu->iem.s.enmEffAddrMode)
3540 {
3541 case IEMMODE_16BIT:
3542 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3543 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3544 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3545 iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3546 case IEMMODE_32BIT:
3547 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3548 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3549 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3550 iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3551 case IEMMODE_64BIT:
3552 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3553 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3554 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3555 iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3556 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3557 }
3558 break;
3559 case IEMMODE_64BIT:
3560 case IEMMODE_32BIT:
3561 switch (pVCpu->iem.s.enmEffAddrMode)
3562 {
3563 case IEMMODE_16BIT:
3564 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3565 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3566 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3567 iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3568 case IEMMODE_32BIT:
3569 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3570 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3571 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3572 iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3573 case IEMMODE_64BIT:
3574 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3575 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
3576 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
3577 iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3579 }
3580 break;
3581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3582 }
3583 }
3584 else
3585 {
3586 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
3587 switch (pVCpu->iem.s.enmEffOpSize)
3588 {
3589 case IEMMODE_16BIT:
3590 switch (pVCpu->iem.s.enmEffAddrMode)
3591 {
3592 case IEMMODE_16BIT:
3593 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3594 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3595 iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
3596 case IEMMODE_32BIT:
3597 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3598 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3599 iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
3600 case IEMMODE_64BIT:
3601 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3602 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3603 iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
3604 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3605 }
3606 break;
3607 case IEMMODE_64BIT:
3608 case IEMMODE_32BIT:
3609 switch (pVCpu->iem.s.enmEffAddrMode)
3610 {
3611 case IEMMODE_16BIT:
3612 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3613 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3614 iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
3615 case IEMMODE_32BIT:
3616 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3617 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3618 iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
3619 case IEMMODE_64BIT:
3620 IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
3621 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI),
3622 iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
3623 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3624 }
3625 break;
3626 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3627 }
3628 }
3629}
3630
3631
3632/**
3633 * @opcode 0x70
3634 */
3635FNIEMOP_DEF(iemOp_jo_Jb)
3636{
3637 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
3638 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3639 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3640
3641 IEM_MC_BEGIN(0, 0, 0, 0);
3642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3643 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3644 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3645 } IEM_MC_ELSE() {
3646 IEM_MC_ADVANCE_RIP_AND_FINISH();
3647 } IEM_MC_ENDIF();
3648 IEM_MC_END();
3649}
3650
3651
3652/**
3653 * @opcode 0x71
3654 */
3655FNIEMOP_DEF(iemOp_jno_Jb)
3656{
3657 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
3658 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3659 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3660
3661 IEM_MC_BEGIN(0, 0, 0, 0);
3662 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3663 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
3664 IEM_MC_ADVANCE_RIP_AND_FINISH();
3665 } IEM_MC_ELSE() {
3666 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3667 } IEM_MC_ENDIF();
3668 IEM_MC_END();
3669}
3670
3671/**
3672 * @opcode 0x72
3673 */
3674FNIEMOP_DEF(iemOp_jc_Jb)
3675{
3676 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
3677 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3678 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3679
3680 IEM_MC_BEGIN(0, 0, 0, 0);
3681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3682 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3683 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3684 } IEM_MC_ELSE() {
3685 IEM_MC_ADVANCE_RIP_AND_FINISH();
3686 } IEM_MC_ENDIF();
3687 IEM_MC_END();
3688}
3689
3690
3691/**
3692 * @opcode 0x73
3693 */
3694FNIEMOP_DEF(iemOp_jnc_Jb)
3695{
3696 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
3697 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3698 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3699
3700 IEM_MC_BEGIN(0, 0, 0, 0);
3701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3702 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
3703 IEM_MC_ADVANCE_RIP_AND_FINISH();
3704 } IEM_MC_ELSE() {
3705 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3706 } IEM_MC_ENDIF();
3707 IEM_MC_END();
3708}
3709
3710
3711/**
3712 * @opcode 0x74
3713 */
3714FNIEMOP_DEF(iemOp_je_Jb)
3715{
3716 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
3717 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3718 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3719
3720 IEM_MC_BEGIN(0, 0, 0, 0);
3721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3722 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3723 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3724 } IEM_MC_ELSE() {
3725 IEM_MC_ADVANCE_RIP_AND_FINISH();
3726 } IEM_MC_ENDIF();
3727 IEM_MC_END();
3728}
3729
3730
3731/**
3732 * @opcode 0x75
3733 */
3734FNIEMOP_DEF(iemOp_jne_Jb)
3735{
3736 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
3737 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3738 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3739
3740 IEM_MC_BEGIN(0, 0, 0, 0);
3741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3742 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
3743 IEM_MC_ADVANCE_RIP_AND_FINISH();
3744 } IEM_MC_ELSE() {
3745 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3746 } IEM_MC_ENDIF();
3747 IEM_MC_END();
3748}
3749
3750
3751/**
3752 * @opcode 0x76
3753 */
3754FNIEMOP_DEF(iemOp_jbe_Jb)
3755{
3756 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
3757 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3758 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3759
3760 IEM_MC_BEGIN(0, 0, 0, 0);
3761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3762 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3763 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3764 } IEM_MC_ELSE() {
3765 IEM_MC_ADVANCE_RIP_AND_FINISH();
3766 } IEM_MC_ENDIF();
3767 IEM_MC_END();
3768}
3769
3770
3771/**
3772 * @opcode 0x77
3773 */
3774FNIEMOP_DEF(iemOp_jnbe_Jb)
3775{
3776 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
3777 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3778 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3779
3780 IEM_MC_BEGIN(0, 0, 0, 0);
3781 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3782 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
3783 IEM_MC_ADVANCE_RIP_AND_FINISH();
3784 } IEM_MC_ELSE() {
3785 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3786 } IEM_MC_ENDIF();
3787 IEM_MC_END();
3788}
3789
3790
3791/**
3792 * @opcode 0x78
3793 */
3794FNIEMOP_DEF(iemOp_js_Jb)
3795{
3796 IEMOP_MNEMONIC(js_Jb, "js Jb");
3797 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3798 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3799
3800 IEM_MC_BEGIN(0, 0, 0, 0);
3801 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3802 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3803 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3804 } IEM_MC_ELSE() {
3805 IEM_MC_ADVANCE_RIP_AND_FINISH();
3806 } IEM_MC_ENDIF();
3807 IEM_MC_END();
3808}
3809
3810
3811/**
3812 * @opcode 0x79
3813 */
3814FNIEMOP_DEF(iemOp_jns_Jb)
3815{
3816 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
3817 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3818 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3819
3820 IEM_MC_BEGIN(0, 0, 0, 0);
3821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3822 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
3823 IEM_MC_ADVANCE_RIP_AND_FINISH();
3824 } IEM_MC_ELSE() {
3825 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3826 } IEM_MC_ENDIF();
3827 IEM_MC_END();
3828}
3829
3830
3831/**
3832 * @opcode 0x7a
3833 */
3834FNIEMOP_DEF(iemOp_jp_Jb)
3835{
3836 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
3837 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3838 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3839
3840 IEM_MC_BEGIN(0, 0, 0, 0);
3841 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3842 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3843 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3844 } IEM_MC_ELSE() {
3845 IEM_MC_ADVANCE_RIP_AND_FINISH();
3846 } IEM_MC_ENDIF();
3847 IEM_MC_END();
3848}
3849
3850
3851/**
3852 * @opcode 0x7b
3853 */
3854FNIEMOP_DEF(iemOp_jnp_Jb)
3855{
3856 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3857 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3858 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3859
3860 IEM_MC_BEGIN(0, 0, 0, 0);
3861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3863 IEM_MC_ADVANCE_RIP_AND_FINISH();
3864 } IEM_MC_ELSE() {
3865 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3866 } IEM_MC_ENDIF();
3867 IEM_MC_END();
3868}
3869
3870
3871/**
3872 * @opcode 0x7c
3873 */
3874FNIEMOP_DEF(iemOp_jl_Jb)
3875{
3876 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3877 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3878 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3879
3880 IEM_MC_BEGIN(0, 0, 0, 0);
3881 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3882 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3883 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3884 } IEM_MC_ELSE() {
3885 IEM_MC_ADVANCE_RIP_AND_FINISH();
3886 } IEM_MC_ENDIF();
3887 IEM_MC_END();
3888}
3889
3890
3891/**
3892 * @opcode 0x7d
3893 */
3894FNIEMOP_DEF(iemOp_jnl_Jb)
3895{
3896 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3897 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3898 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3899
3900 IEM_MC_BEGIN(0, 0, 0, 0);
3901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3902 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3903 IEM_MC_ADVANCE_RIP_AND_FINISH();
3904 } IEM_MC_ELSE() {
3905 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3906 } IEM_MC_ENDIF();
3907 IEM_MC_END();
3908}
3909
3910
3911/**
3912 * @opcode 0x7e
3913 */
3914FNIEMOP_DEF(iemOp_jle_Jb)
3915{
3916 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3917 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3918 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3919
3920 IEM_MC_BEGIN(0, 0, 0, 0);
3921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3922 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3923 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3924 } IEM_MC_ELSE() {
3925 IEM_MC_ADVANCE_RIP_AND_FINISH();
3926 } IEM_MC_ENDIF();
3927 IEM_MC_END();
3928}
3929
3930
3931/**
3932 * @opcode 0x7f
3933 */
3934FNIEMOP_DEF(iemOp_jnle_Jb)
3935{
3936 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3937 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3938 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
3939
3940 IEM_MC_BEGIN(0, 0, 0, 0);
3941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3942 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3943 IEM_MC_ADVANCE_RIP_AND_FINISH();
3944 } IEM_MC_ELSE() {
3945 IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
3946 } IEM_MC_ENDIF();
3947 IEM_MC_END();
3948}
3949
3950
3951/**
3952 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
3953 * iemOp_Grp1_Eb_Ib_80.
3954 */
3955#define IEMOP_BODY_BINARY_Eb_Ib_RW(a_fnNormalU8) \
3956 if (IEM_IS_MODRM_REG_MODE(bRm)) \
3957 { \
3958 /* register target */ \
3959 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3960 IEM_MC_BEGIN(3, 0, 0, 0); \
3961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3962 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3963 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3964 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
3965 \
3966 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
3967 IEM_MC_REF_EFLAGS(pEFlags); \
3968 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3969 \
3970 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3971 IEM_MC_END(); \
3972 } \
3973 else \
3974 { \
3975 /* memory target */ \
3976 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
3977 { \
3978 IEM_MC_BEGIN(3, 3, 0, 0); \
3979 IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
3980 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
3981 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
3982 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
3983 \
3984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
3985 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
3986 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1); \
3987 IEMOP_HLP_DONE_DECODING(); \
3988 \
3989 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
3990 IEM_MC_FETCH_EFLAGS(EFlags); \
3991 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
3992 \
3993 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
3994 IEM_MC_COMMIT_EFLAGS(EFlags); \
3995 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
3996 IEM_MC_END(); \
3997 } \
3998 else \
3999 { \
4000 (void)0
4001
/**
 * Completes IEMOP_BODY_BINARY_Eb_Ib_RW with the LOCK-prefixed memory form,
 * invoking the atomic (a_fnLockedU8) worker.  Kept as a separate macro due to
 * parsing restrictions in IEMAllInstPython.py.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_LOCKED(a_fnLockedU8) \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t *,       pu8Dst,             0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
4025
/**
 * Read-only variant of IEMOP_BODY_BINARY_Eb_Ib_RW for workers that only read
 * the destination (cmp): the memory operand is mapped RO and nothing is
 * written back except EFLAGS.  Must be followed by
 * IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK to close the dangling 'else'.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_RO(a_fnNormalU8) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        IEM_MC_BEGIN(3, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0); \
        IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
        IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
        \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
        \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(3, 3, 0, 0); \
            IEM_MC_ARG(uint8_t const *, pu8Dst,             0); \
            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2); \
            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t,       bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
            uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
            IEM_MC_ARG_CONST(uint8_t,   u8Src, /*=*/ u8Imm,     1); \
            IEMOP_HLP_DONE_DECODING(); \
            \
            IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU8, pu8Dst, u8Src, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            (void)0
4072
/**
 * Completes IEMOP_BODY_BINARY_Eb_Ib_RO for instructions where a LOCK prefix
 * is invalid: finish decoding, then raise \#UD.
 */
#define IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK() \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4079
4080
4081
4082/**
4083 * @opmaps grp1_80,grp1_83
4084 * @opcode /0
4085 */
4086FNIEMOP_DEF_1(iemOp_Grp1_add_Eb_Ib, uint8_t, bRm)
4087{
4088 IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib");
4089 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_add_u8);
4090 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_add_u8_locked);
4091}
4092
4093
4094/**
4095 * @opmaps grp1_80,grp1_83
4096 * @opcode /1
4097 */
4098FNIEMOP_DEF_1(iemOp_Grp1_or_Eb_Ib, uint8_t, bRm)
4099{
4100 IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib");
4101 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_or_u8);
4102 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_or_u8_locked);
4103}
4104
4105
4106/**
4107 * @opmaps grp1_80,grp1_83
4108 * @opcode /2
4109 */
4110FNIEMOP_DEF_1(iemOp_Grp1_adc_Eb_Ib, uint8_t, bRm)
4111{
4112 IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib");
4113 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_adc_u8);
4114 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_adc_u8_locked);
4115}
4116
4117
4118/**
4119 * @opmaps grp1_80,grp1_83
4120 * @opcode /3
4121 */
4122FNIEMOP_DEF_1(iemOp_Grp1_sbb_Eb_Ib, uint8_t, bRm)
4123{
4124 IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib");
4125 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sbb_u8);
4126 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sbb_u8_locked);
4127}
4128
4129
4130/**
4131 * @opmaps grp1_80,grp1_83
4132 * @opcode /4
4133 */
4134FNIEMOP_DEF_1(iemOp_Grp1_and_Eb_Ib, uint8_t, bRm)
4135{
4136 IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib");
4137 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_and_u8);
4138 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_and_u8_locked);
4139}
4140
4141
4142/**
4143 * @opmaps grp1_80,grp1_83
4144 * @opcode /5
4145 */
4146FNIEMOP_DEF_1(iemOp_Grp1_sub_Eb_Ib, uint8_t, bRm)
4147{
4148 IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib");
4149 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_sub_u8);
4150 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_sub_u8_locked);
4151}
4152
4153
4154/**
4155 * @opmaps grp1_80,grp1_83
4156 * @opcode /6
4157 */
4158FNIEMOP_DEF_1(iemOp_Grp1_xor_Eb_Ib, uint8_t, bRm)
4159{
4160 IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib");
4161 IEMOP_BODY_BINARY_Eb_Ib_RW( iemAImpl_xor_u8);
4162 IEMOP_BODY_BINARY_Eb_Ib_LOCKED(iemAImpl_xor_u8_locked);
4163}
4164
4165
4166/**
4167 * @opmaps grp1_80,grp1_83
4168 * @opcode /7
4169 */
4170FNIEMOP_DEF_1(iemOp_Grp1_cmp_Eb_Ib, uint8_t, bRm)
4171{
4172 IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib");
4173 IEMOP_BODY_BINARY_Eb_Ib_RO(iemAImpl_cmp_u8);
4174 IEMOP_BODY_BINARY_Eb_Ib_NO_LOCK();
4175}
4176
4177
4178/**
4179 * @opcode 0x80
4180 */
4181FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
4182{
4183 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4184 switch (IEM_GET_MODRM_REG_8(bRm))
4185 {
4186 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Eb_Ib, bRm);
4187 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Eb_Ib, bRm);
4188 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Eb_Ib, bRm);
4189 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Eb_Ib, bRm);
4190 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Eb_Ib, bRm);
4191 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Eb_Ib, bRm);
4192 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Eb_Ib, bRm);
4193 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Eb_Ib, bRm);
4194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4195 }
4196}
4197
4198
4199/**
4200 * Body for a group 1 binary operator.
4201 */
4202#define IEMOP_BODY_BINARY_Ev_Iz_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4203 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4204 { \
4205 /* register target */ \
4206 switch (pVCpu->iem.s.enmEffOpSize) \
4207 { \
4208 case IEMMODE_16BIT: \
4209 { \
4210 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4211 IEM_MC_BEGIN(3, 0, 0, 0); \
4212 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4213 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4214 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1); \
4215 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4216 \
4217 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4218 IEM_MC_REF_EFLAGS(pEFlags); \
4219 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4220 \
4221 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4222 IEM_MC_END(); \
4223 break; \
4224 } \
4225 \
4226 case IEMMODE_32BIT: \
4227 { \
4228 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4229 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4231 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4232 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1); \
4233 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4234 \
4235 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4236 IEM_MC_REF_EFLAGS(pEFlags); \
4237 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4238 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4239 \
4240 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4241 IEM_MC_END(); \
4242 break; \
4243 } \
4244 \
4245 case IEMMODE_64BIT: \
4246 { \
4247 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4248 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4249 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4250 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4251 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1); \
4252 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4253 \
4254 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4255 IEM_MC_REF_EFLAGS(pEFlags); \
4256 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4257 \
4258 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4259 IEM_MC_END(); \
4260 break; \
4261 } \
4262 \
4263 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4264 } \
4265 } \
4266 else \
4267 { \
4268 /* memory target */ \
4269 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4270 { \
4271 switch (pVCpu->iem.s.enmEffOpSize) \
4272 { \
4273 case IEMMODE_16BIT: \
4274 { \
4275 IEM_MC_BEGIN(3, 3, 0, 0); \
4276 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
4278 \
4279 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
4280 IEMOP_HLP_DONE_DECODING(); \
4281 \
4282 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4283 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4284 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4285 \
4286 IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
4287 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4288 IEM_MC_FETCH_EFLAGS(EFlags); \
4289 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4290 \
4291 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4292 IEM_MC_COMMIT_EFLAGS(EFlags); \
4293 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4294 IEM_MC_END(); \
4295 break; \
4296 } \
4297 \
4298 case IEMMODE_32BIT: \
4299 { \
4300 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4301 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4302 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4303 \
4304 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
4305 IEMOP_HLP_DONE_DECODING(); \
4306 \
4307 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4308 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4309 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4310 \
4311 IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
4312 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4313 IEM_MC_FETCH_EFLAGS(EFlags); \
4314 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4315 \
4316 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4317 IEM_MC_COMMIT_EFLAGS(EFlags); \
4318 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4319 IEM_MC_END(); \
4320 break; \
4321 } \
4322 \
4323 case IEMMODE_64BIT: \
4324 { \
4325 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4326 \
4327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
4329 \
4330 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
4331 IEMOP_HLP_DONE_DECODING(); \
4332 \
4333 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4334 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4335 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4336 \
4337 IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
4338 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4339 IEM_MC_FETCH_EFLAGS(EFlags); \
4340 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4341 \
4342 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4343 IEM_MC_COMMIT_EFLAGS(EFlags); \
4344 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4345 IEM_MC_END(); \
4346 break; \
4347 } \
4348 \
4349 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4350 } \
4351 } \
4352 else \
4353 { \
4354 (void)0
/* This must be a separate macro due to parsing restrictions in IEMAllInstPython.py. */
/**
 * Completes IEMOP_BODY_BINARY_Ev_Iz_RW with the LOCK-prefixed memory form,
 * invoking the atomic (a_fnLockedUxx) workers.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4439
/* read-only version */
/**
 * Read-only Ev,Iz variant for workers that only read the destination (cmp):
 * memory is mapped RO, nothing is written back except EFLAGS, and no
 * 32-bit high-register clearing is needed.  Self-contained: the LOCK-prefix
 * case raises \#UD in the final else branch.
 */
#define IEMOP_BODY_BINARY_Ev_Iz_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* register target */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
            { \
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0); \
                IEM_MC_ARG_CONST(uint16_t,  u16Src, /*=*/ u16Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_32BIT: \
            { \
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0); \
                IEM_MC_ARG_CONST(uint32_t,  u32Src, /*=*/ u32Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            case IEMMODE_64BIT: \
            { \
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0); \
                IEM_MC_ARG_CONST(uint64_t,  u64Src, /*=*/ u64Imm,  1); \
                IEM_MC_ARG(uint32_t *,      pEFlags,                2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            } \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* memory target */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); \
                    \
                    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_32BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                case IEMMODE_64BIT: \
                { \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); \
                    \
                    uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                } \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefix is invalid for read-only operations: raise \#UD. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
4596
4597
4598/**
4599 * @opmaps grp1_81
4600 * @opcode /0
4601 */
4602FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Iz, uint8_t, bRm)
4603{
4604 IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz");
4605 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_add_u16, iemAImpl_add_u32, iemAImpl_add_u64);
4606 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
4607}
4608
4609
4610/**
4611 * @opmaps grp1_81
4612 * @opcode /1
4613 */
4614FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Iz, uint8_t, bRm)
4615{
4616 IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz");
4617 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_or_u16, iemAImpl_or_u32, iemAImpl_or_u64);
4618 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
4619}
4620
4621
4622/**
4623 * @opmaps grp1_81
4624 * @opcode /2
4625 */
4626FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Iz, uint8_t, bRm)
4627{
4628 IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz");
4629 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_adc_u16, iemAImpl_adc_u32, iemAImpl_adc_u64);
4630 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
4631}
4632
4633
4634/**
4635 * @opmaps grp1_81
4636 * @opcode /3
4637 */
4638FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Iz, uint8_t, bRm)
4639{
4640 IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz");
4641 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sbb_u16, iemAImpl_sbb_u32, iemAImpl_sbb_u64);
4642 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
4643}
4644
4645
4646/**
4647 * @opmaps grp1_81
4648 * @opcode /4
4649 */
4650FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Iz, uint8_t, bRm)
4651{
4652 IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz");
4653 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_and_u16, iemAImpl_and_u32, iemAImpl_and_u64);
4654 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
4655}
4656
4657
4658/**
4659 * @opmaps grp1_81
4660 * @opcode /5
4661 */
4662FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Iz, uint8_t, bRm)
4663{
4664 IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz");
4665 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_sub_u16, iemAImpl_sub_u32, iemAImpl_sub_u64);
4666 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
4667}
4668
4669
4670/**
4671 * @opmaps grp1_81
4672 * @opcode /6
4673 */
4674FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Iz, uint8_t, bRm)
4675{
4676 IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz");
4677 IEMOP_BODY_BINARY_Ev_Iz_RW( iemAImpl_xor_u16, iemAImpl_xor_u32, iemAImpl_xor_u64);
4678 IEMOP_BODY_BINARY_Ev_Iz_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
4679}
4680
4681
4682/**
4683 * @opmaps grp1_81
4684 * @opcode /7
4685 */
4686FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Iz, uint8_t, bRm)
4687{
4688 IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz");
4689 IEMOP_BODY_BINARY_Ev_Iz_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
4690}
4691
4692
4693/**
4694 * @opcode 0x81
4695 */
4696FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
4697{
4698 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4699 switch (IEM_GET_MODRM_REG_8(bRm))
4700 {
4701 case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Iz, bRm);
4702 case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Iz, bRm);
4703 case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Iz, bRm);
4704 case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Iz, bRm);
4705 case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Iz, bRm);
4706 case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Iz, bRm);
4707 case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Iz, bRm);
4708 case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Iz, bRm);
4709 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4710 }
4711}
4712
4713
4714/**
4715 * @opcode 0x82
4716 * @opmnemonic grp1_82
4717 * @opgroup og_groups
4718 */
4719FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
4720{
4721 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
4722 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
4723}
4724
4725
4726/**
4727 * Body for group 1 instruction (binary) w/ byte imm operand, dispatched via
4728 * iemOp_Grp1_Ev_Ib.
4729 */
4730#define IEMOP_BODY_BINARY_Ev_Ib_RW(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
4731 if (IEM_IS_MODRM_REG_MODE(bRm)) \
4732 { \
4733 /* \
4734 * Register target \
4735 */ \
4736 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4737 switch (pVCpu->iem.s.enmEffOpSize) \
4738 { \
4739 case IEMMODE_16BIT: \
4740 IEM_MC_BEGIN(3, 0, 0, 0); \
4741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4742 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4743 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
4744 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4745 \
4746 IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4747 IEM_MC_REF_EFLAGS(pEFlags); \
4748 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4749 \
4750 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4751 IEM_MC_END(); \
4752 break; \
4753 \
4754 case IEMMODE_32BIT: \
4755 IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
4756 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4757 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4758 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
4759 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4760 \
4761 IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4762 IEM_MC_REF_EFLAGS(pEFlags); \
4763 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4764 IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
4765 \
4766 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4767 IEM_MC_END(); \
4768 break; \
4769 \
4770 case IEMMODE_64BIT: \
4771 IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
4772 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4773 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4774 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
4775 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
4776 \
4777 IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
4778 IEM_MC_REF_EFLAGS(pEFlags); \
4779 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4780 \
4781 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4782 IEM_MC_END(); \
4783 break; \
4784 \
4785 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4786 } \
4787 } \
4788 else \
4789 { \
4790 /* \
4791 * Memory target. \
4792 */ \
4793 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
4794 { \
4795 switch (pVCpu->iem.s.enmEffOpSize) \
4796 { \
4797 case IEMMODE_16BIT: \
4798 IEM_MC_BEGIN(3, 3, 0, 0); \
4799 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4800 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4801 \
4802 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4803 IEMOP_HLP_DONE_DECODING(); \
4804 \
4805 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4806 IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
4807 IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4808 \
4809 IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
4810 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4811 IEM_MC_FETCH_EFLAGS(EFlags); \
4812 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
4813 \
4814 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4815 IEM_MC_COMMIT_EFLAGS(EFlags); \
4816 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4817 IEM_MC_END(); \
4818 break; \
4819 \
4820 case IEMMODE_32BIT: \
4821 IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
4822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4824 \
4825 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4826 IEMOP_HLP_DONE_DECODING(); \
4827 \
4828 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4829 IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
4830 IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4831 \
4832 IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
4833 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4834 IEM_MC_FETCH_EFLAGS(EFlags); \
4835 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
4836 \
4837 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4838 IEM_MC_COMMIT_EFLAGS(EFlags); \
4839 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4840 IEM_MC_END(); \
4841 break; \
4842 \
4843 case IEMMODE_64BIT: \
4844 IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
4845 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
4846 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
4847 \
4848 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
4849 IEMOP_HLP_DONE_DECODING(); \
4850 \
4851 IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
4852 IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
4853 IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
4854 \
4855 IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
4856 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
4857 IEM_MC_FETCH_EFLAGS(EFlags); \
4858 IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
4859 \
4860 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
4861 IEM_MC_COMMIT_EFLAGS(EFlags); \
4862 IEM_MC_ADVANCE_RIP_AND_FINISH(); \
4863 IEM_MC_END(); \
4864 break; \
4865 \
4866 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4867 } \
4868 } \
4869 else \
4870 { \
4871 (void)0
/* Separate macro to work around parsing issue in IEMAllInstPython.py */
/**
 * Completes IEMOP_BODY_BINARY_Ev_Ib_RW with the LOCK-prefixed memory form,
 * invoking the atomic (a_fnLockedUxx) workers; the byte immediate is
 * sign-extended to the operand size, same as in the RW body.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnLockedU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
4950
/**
 * Read-only variant of the 0x83 group body, used by CMP (/7).
 *
 * Decodes Ev,Ib with the immediate byte sign-extended to the effective
 * operand size.  Both the register and the memory form only read the
 * destination (the memory operand is mapped RO); a LOCK prefix on the
 * memory form raises an invalid-lock-prefix exception.
 */
#define IEMOP_BODY_BINARY_Ev_Ib_RO(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(3, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1); \
                IEM_MC_ARG(uint32_t *, pEFlags, 2); \
                \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(3, 3, 0, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint16_t const *, pu16Dst, 0); \
                    IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint16_t, u16Src, (int16_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU16, pu16Dst, u16Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint32_t const *, pu32Dst, 0); \
                    IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint32_t, u32Src, (int32_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU32, pu32Dst, u32Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); \
                    \
                    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm); \
                    IEMOP_HLP_DONE_DECODING(); \
                    \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    IEM_MC_ARG(uint64_t const *, pu64Dst, 0); \
                    IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    \
                    IEM_MC_ARG_CONST(uint64_t, u64Src, (int64_t)(int8_t)u8Imm, 1); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_3(a_fnNormalU64, pu64Dst, u64Src, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            /* LOCK prefix on a read-only operation: raise \#UD. */ \
            IEMOP_HLP_DONE_DECODING(); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } \
    (void)0
5097
/**
 * @opmaps grp1_83
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp1_add_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib");
    /* The RW body covers register targets and un-LOCKed memory; the LOCKED
       body continues it with the LOCK-prefixed memory path (atomic helper). */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_add_u16,        iemAImpl_add_u32,        iemAImpl_add_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_add_u16_locked, iemAImpl_add_u32_locked, iemAImpl_add_u64_locked);
}
5108
5109
/**
 * @opmaps grp1_83
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp1_or_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib");
    /* RW body = register + un-LOCKed memory forms; LOCKED body continues it
       with the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_or_u16,        iemAImpl_or_u32,        iemAImpl_or_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_or_u16_locked, iemAImpl_or_u32_locked, iemAImpl_or_u64_locked);
}
5120
5121
/**
 * @opmaps grp1_83
 * @opcode /2
 */
FNIEMOP_DEF_1(iemOp_Grp1_adc_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib");
    /* RW body = register + un-LOCKed memory forms; LOCKED body continues it
       with the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_adc_u16,        iemAImpl_adc_u32,        iemAImpl_adc_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_adc_u16_locked, iemAImpl_adc_u32_locked, iemAImpl_adc_u64_locked);
}
5132
5133
/**
 * @opmaps grp1_83
 * @opcode /3
 */
FNIEMOP_DEF_1(iemOp_Grp1_sbb_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib");
    /* RW body = register + un-LOCKed memory forms; LOCKED body continues it
       with the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sbb_u16,        iemAImpl_sbb_u32,        iemAImpl_sbb_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sbb_u16_locked, iemAImpl_sbb_u32_locked, iemAImpl_sbb_u64_locked);
}
5144
5145
/**
 * @opmaps grp1_83
 * @opcode /4
 */
FNIEMOP_DEF_1(iemOp_Grp1_and_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib");
    /* RW body = register + un-LOCKed memory forms; LOCKED body continues it
       with the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_and_u16,        iemAImpl_and_u32,        iemAImpl_and_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_and_u16_locked, iemAImpl_and_u32_locked, iemAImpl_and_u64_locked);
}
5156
5157
/**
 * @opmaps grp1_83
 * @opcode /5
 */
FNIEMOP_DEF_1(iemOp_Grp1_sub_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib");
    /* RW body = register + un-LOCKed memory forms; LOCKED body continues it
       with the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_sub_u16,        iemAImpl_sub_u32,        iemAImpl_sub_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_sub_u16_locked, iemAImpl_sub_u32_locked, iemAImpl_sub_u64_locked);
}
5168
5169
/**
 * @opmaps grp1_83
 * @opcode /6
 */
FNIEMOP_DEF_1(iemOp_Grp1_xor_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib");
    /* RW body = register + un-LOCKed memory forms; LOCKED body continues it
       with the LOCK-prefixed memory form. */
    IEMOP_BODY_BINARY_Ev_Ib_RW(    iemAImpl_xor_u16,        iemAImpl_xor_u32,        iemAImpl_xor_u64);
    IEMOP_BODY_BINARY_Ev_Ib_LOCKED(iemAImpl_xor_u16_locked, iemAImpl_xor_u32_locked, iemAImpl_xor_u64_locked);
}
5180
5181
/**
 * @opmaps grp1_83
 * @opcode /7
 */
FNIEMOP_DEF_1(iemOp_Grp1_cmp_Ev_Ib, uint8_t, bRm)
{
    IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib");
    /* CMP only reads its destination, so the read-only body is used and no
       locked variant exists (LOCK raises \#UD inside the RO body). */
    IEMOP_BODY_BINARY_Ev_Ib_RO(iemAImpl_cmp_u16, iemAImpl_cmp_u32, iemAImpl_cmp_u64);
}
5191
5192
/**
 * @opcode 0x83
 *
 * Group-1 dispatcher: the ModR/M reg field selects which of the eight
 * arithmetic/logic Ev,Ib instructions is decoded.
 */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))    /* /0../7 from the reg field */
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp1_add_Ev_Ib, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp1_or_Ev_Ib,  bRm);
        case 2: return FNIEMOP_CALL_1(iemOp_Grp1_adc_Ev_Ib, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_Grp1_sbb_Ev_Ib, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_Grp1_and_Ev_Ib, bRm);
        case 5: return FNIEMOP_CALL_1(iemOp_Grp1_sub_Ev_Ib, bRm);
        case 6: return FNIEMOP_CALL_1(iemOp_Grp1_xor_Ev_Ib, bRm);
        case 7: return FNIEMOP_CALL_1(iemOp_Grp1_cmp_Ev_Ib, bRm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5215
5216
/**
 * @opcode 0x84
 *
 * TEST only reads both operands (AF is architecturally undefined), so the
 * read-only body is used together with the no-LOCK continuation.
 */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_r8_RO(iemAImpl_test_u8);
    /* Continuation handling the LOCK-prefixed case (macro defined earlier). */
    IEMOP_BODY_BINARY_rm_r8_NO_LOCK();
}
5227
5228
/**
 * @opcode 0x85
 *
 * TEST Ev,Gv: read-only on both operands, AF architecturally undefined.
 */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    IEMOP_BODY_BINARY_rm_rv_RO(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64);
}
5238
5239
/**
 * @opcode 0x86
 *
 * XCHG Eb,Gb.  The register form is a plain two-temporary swap.  The memory
 * form defaults to the locked (atomic) worker -- note the inverted test on
 * IEM_F_X86_DISREGARD_LOCK -- since XCHG with a memory operand is implicitly
 * locked regardless of any LOCK prefix.
 */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_FETCH_GREG_U8(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/* Maps the byte at r/m RW, swaps it with the reg-field register via the given
   worker, then writes the old memory value back to the register. */
#define IEMOP_XCHG_BYTE(a_fnWorker) \
            IEM_MC_BEGIN(2, 4, 0, 0); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            IEM_MC_LOCAL(uint8_t, uTmpReg); \
            IEM_MC_ARG(uint8_t *, pu8Mem, 0); \
            IEM_MC_ARG_LOCAL_REF(uint8_t *, pu8Reg, uTmpReg, 1); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_GREG_U8(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker, pu8Mem, pu8Reg); \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
            \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END()

        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_locked);
        }
        else
        {
            IEMOP_XCHG_BYTE(iemAImpl_xchg_u8_unlocked);
        }
    }
}
5300
5301
/**
 * @opcode 0x87
 *
 * XCHG Ev,Gv.  The register form swaps via two temporaries per operand size.
 * The memory form defaults to the locked (atomic) workers -- XCHG with a
 * memory operand is implicitly locked -- and only uses the unlocked workers
 * when IEM_F_X86_DISREGARD_LOCK is set.
 */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U16(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U32(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_FETCH_GREG_U64(uTmp2, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uTmp1);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmp2);

                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
/* Per operand size: map r/m RW, swap with the reg-field register via the
   given worker, write the old memory value back to the register. */
#define IEMOP_XCHG_EV_GV(a_fnWorker16, a_fnWorker32, a_fnWorker64) \
            do { \
                switch (pVCpu->iem.s.enmEffOpSize) \
                { \
                    case IEMMODE_16BIT: \
                        IEM_MC_BEGIN(2, 4, 0, 0); \
                        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_LOCAL(uint16_t, uTmpReg); \
                        IEM_MC_ARG(uint16_t *, pu16Mem, 0); \
                        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                        IEM_MC_MEM_MAP_U16_RW(pu16Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U16(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker16, pu16Mem, pu16Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                    \
                    case IEMMODE_32BIT: \
                        IEM_MC_BEGIN(2, 4, IEM_MC_F_MIN_386, 0); \
                        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_LOCAL(uint32_t, uTmpReg); \
                        IEM_MC_ARG(uint32_t *, pu32Mem, 0); \
                        IEM_MC_ARG_LOCAL_REF(uint32_t *, pu32Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                        IEM_MC_MEM_MAP_U32_RW(pu32Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U32(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker32, pu32Mem, pu32Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                    \
                    case IEMMODE_64BIT: \
                        IEM_MC_BEGIN(2, 4, IEM_MC_F_64BIT, 0); \
                        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                        IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                        IEM_MC_LOCAL(uint64_t, uTmpReg); \
                        IEM_MC_ARG(uint64_t *, pu64Mem, 0); \
                        IEM_MC_ARG_LOCAL_REF(uint64_t *, pu64Reg, uTmpReg, 1); \
                        \
                        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                        IEM_MC_MEM_MAP_U64_RW(pu64Mem, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                        IEM_MC_FETCH_GREG_U64(uTmpReg, IEM_GET_MODRM_REG(pVCpu, bRm)); \
                        IEM_MC_CALL_VOID_AIMPL_2(a_fnWorker64, pu64Mem, pu64Reg); \
                        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                        IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uTmpReg); \
                        \
                        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                        IEM_MC_END(); \
                        break; \
                    \
                    IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
                } \
            } while (0)
        if (!(pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK))
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_locked, iemAImpl_xchg_u32_locked, iemAImpl_xchg_u64_locked);
        }
        else
        {
            IEMOP_XCHG_EV_GV(iemAImpl_xchg_u16_unlocked, iemAImpl_xchg_u32_unlocked, iemAImpl_xchg_u64_unlocked);
        }
    }
}
5447
5448
/**
 * @opcode 0x88
 *
 * MOV Eb,Gb: copy the byte register named by the reg field into the
 * register or memory byte named by r/m.
 */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_RM(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_REG(pVCpu, bRm));
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5488
5489
/**
 * @opcode 0x89
 *
 * MOV Ev,Gv: copy the reg-field register to the register or memory operand
 * named by r/m, for 16/32/64-bit effective operand sizes.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_REG(pVCpu, bRm));
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5586
5587
/**
 * @opcode 0x8a
 *
 * MOV Gb,Eb: load the reg-field byte register from the register or memory
 * byte named by r/m.
 */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(IEM_GET_MODRM_REG(pVCpu, bRm), u8Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5626
5627
/**
 * @opcode 0x8b
 *
 * MOV Gv,Ev: load the reg-field register from the register or memory operand
 * named by r/m, for 16/32/64-bit effective operand sizes.
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5724
5725
5726/**
5727 * opcode 0x63
5728 * @todo Table fixme
5729 */
5730FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
5731{
5732 if (!IEM_IS_64BIT_CODE(pVCpu))
5733 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
5734 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
5735 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
5736 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
5737}
5738
5739
/**
 * @opcode 0x8c
 *
 * MOV Ev,Sw: store a segment selector into a general register (operand size
 * respected, upper bits zero-extended) or into memory (always a word store).
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    if (iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_RM(pVCpu, bRm), u16Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero-extends the selector */
                IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg); /* zero-extends the selector */
                IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Value);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
5818
5819
/**
 * @opcode 0x8d
 *
 * LEA Gv,M: store the effective address of the memory operand into the
 * reg-field register, truncated to the effective operand size.  The register
 * form of the ModR/M byte is invalid (\#UD).
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (IEM_IS_MODRM_REG_MODE(bRm))
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate address to 16 bits */
            IEM_MC_STORE_GREG_U16(IEM_GET_MODRM_REG(pVCpu, bRm), u16Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate address to 32 bits */
            IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), u32Cast);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
5871
5872
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev: load a segment register from a word in a general register or in
 * memory.  CS is an invalid destination (\#UD); the actual load goes through
 * the iemCImpl_load_SReg C implementation, with CIMPL flags chosen per target
 * segment register and CPU mode for the recompiler's benefit.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = IEM_GET_MODRM_REG_8(bRm);
    /** @todo r=bird: What does 8086 do here wrt CS? */
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        IEMOP_RAISE_INVALID_OPCODE_RET(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     *
     * Note! Using IEMOP_MOV_SW_EV_REG_BODY here to specify different
     *       IEM_CIMPL_F_XXX values depending on the CPU mode and target
     *       register. This is a restriction of the current recompiler
     *       approach.
     */
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
/* Register source: fetch the word from the r/m GPR and hand it to
   iemCImpl_load_SReg, flagging the sel/base/limit of the target sreg. */
#define IEMOP_MOV_SW_EV_REG_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 0, 0, a_fCImplFlags); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm)); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS inhibits interrupts/shadows the next instruction. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_REG_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_REG_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_REG_BODY
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
/* Memory source: word fetch from the effective address, then the same
   iemCImpl_load_SReg call as the register form. */
#define IEMOP_MOV_SW_EV_MEM_BODY(a_fCImplFlags) \
            IEM_MC_BEGIN(2, 1, 0, a_fCImplFlags); \
            IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0); \
            IEM_MC_ARG(uint16_t, u16Value, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_CALL_CIMPL_2(a_fCImplFlags, \
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + iSegReg) \
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + iSegReg), \
                                iemCImpl_load_SReg, iSRegArg, u16Value); \
            IEM_MC_END()

        if (iSegReg == X86_SREG_SS)
        {
            /* Loading SS inhibits interrupts/shadows the next instruction. */
            if (IEM_IS_32BIT_CODE(pVCpu))
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW | IEM_CIMPL_F_MODE);
            }
            else
            {
                IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_INHIBIT_SHADOW);
            }
        }
        else if (iSegReg >= X86_SREG_FS || !IEM_IS_32BIT_CODE(pVCpu))
        {
            IEMOP_MOV_SW_EV_MEM_BODY(0);
        }
        else
        {
            IEMOP_MOV_SW_EV_MEM_BODY(IEM_CIMPL_F_MODE);
        }
#undef IEMOP_MOV_SW_EV_MEM_BODY
    }
}
5986
5987
/** Opcode 0x8f /0. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations. Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler. It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev? Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations. This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice. This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#if 1 /* This can be compiled, optimize later if needed. */
    /* Note: the high byte of the 3rd IEM_MC_CALC_RM_EFF_ADDR argument (2/4/8 << 8)
       makes the EA calculation treat rSP as if it had already been incremented
       by the pop size, implementing the Intel behavior described above. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem16, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem32, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 1);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 8 << 8);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ARG_CONST(uint8_t, iEffSeg, pVCpu->iem.s.iEffSeg, 0);
            IEM_MC_CALL_CIMPL_2(0, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP), iemCImpl_pop_mem64, iEffSeg, GCPtrEffDst);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }

#else
# ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 2 << 8, &GCPtrEff); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 4 << 8, &GCPtrEff); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 8 << 8, &GCPtrEff); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP copy so nothing is committed on failure. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        return iemRegUpdateRipAndFinishClearingRF(pVCpu);
    }
    return rcStrict;

# else
    return VERR_IEM_IPE_2;
# endif
#endif
}
6119
6120
/**
 * @opcode 0x8f
 * Group 1A / AMD XOP prefix dispatcher: modrm.reg == 0 is pop Ev, everything
 * else is treated as the three byte XOP escape (when the guest CPU has XOP).
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* XOP must not be combined with opsize/rep/lock/REX prefixes. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* Byte 1 carries inverted R/X/B in bits 7:5; byte 2 is W.vvvv.L.pp. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;   /* XOP.R (inverted) */
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;   /* XOP.X (inverted) */
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;   /* XOP.B (inverted) */
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;       /* XOP.vvvv (inverted) */
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;          /* XOP.L */
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;               /* XOP.pp */

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f) /* mmmmm: opcode map select */
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default: /* NB: the message says 'vvvv' but this is the mmmmm map field. */
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    IEMOP_RAISE_INVALID_OPCODE_RET();
}
6183
6184
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Worker for opcodes 0x90..0x97 (and 0x90 with REX.B). @a iReg is the raw
 * register index; REX.B is merged in here. Register-only form, so no LOCK
 * prefix is allowed and the swap is done via two temporaries.
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    iReg |= pVCpu->iem.s.uRexB; /* extend to r8..r15 when REX.B is set */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6235
6236
6237/**
6238 * @opcode 0x90
6239 */
6240FNIEMOP_DEF(iemOp_nop)
6241{
6242 /* R8/R8D and RAX/EAX can be exchanged. */
6243 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
6244 {
6245 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
6246 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
6247 }
6248
6249 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
6250 {
6251 IEMOP_MNEMONIC(pause, "pause");
6252 /* ASSUMING that we keep the IEM_F_X86_CTX_IN_GUEST, IEM_F_X86_CTX_VMX
6253 and IEM_F_X86_CTX_SVM in the TB key, we can safely do the following: */
6254 if (!IEM_IS_IN_GUEST(pVCpu))
6255 { /* probable */ }
6256#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
6257 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_VMX)
6258 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_vmx_pause);
6259#endif
6260#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
6261 else if (pVCpu->iem.s.fExec & IEM_F_X86_CTX_SVM)
6262 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_VMEXIT, 0, iemCImpl_svm_pause);
6263#endif
6264 }
6265 else
6266 IEMOP_MNEMONIC(nop, "nop");
6267 /** @todo testcase: lock nop; lock pause */
6268 IEM_MC_BEGIN(0, 0, 0, 0);
6269 IEMOP_HLP_DONE_DECODING();
6270 IEM_MC_ADVANCE_RIP_AND_FINISH();
6271 IEM_MC_END();
6272}
6273
6274
/**
 * @opcode 0x91
 * xchg rCX,rAX - thin wrapper around the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
6283
6284
/**
 * @opcode 0x92
 * xchg rDX,rAX - thin wrapper around the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
6293
6294
/**
 * @opcode 0x93
 * xchg rBX,rAX - thin wrapper around the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
6303
6304
6305/**
6306 * @opcode 0x94
6307 */
6308FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
6309{
6310 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
6311 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
6312}
6313
6314
/**
 * @opcode 0x95
 * xchg rBP,rAX - thin wrapper around the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
6323
6324
/**
 * @opcode 0x96
 * xchg rSI,rAX - thin wrapper around the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
6333
6334
/**
 * @opcode 0x97
 * xchg rDI,rAX - thin wrapper around the common xchg-with-rAX worker.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
6343
6344
/**
 * @opcode 0x98
 * CBW/CWDE/CDQE - sign-extend AL->AX / AX->EAX / EAX->RAX depending on the
 * effective operand size. Implemented by testing the source sign bit and
 * either OR-ing in or AND-ing away the upper half of rAX.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6394
6395
/**
 * @opcode 0x99
 * CWD/CDQ/CQO - sign-extend rAX into rDX:rAX by filling rDX with all ones or
 * all zeros depending on the rAX sign bit, per the effective operand size.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6445
6446
/**
 * @opcode 0x9a
 * call Ap - direct far call with an immediate sel:offset pointer; invalid in
 * 64-bit mode. Decoding only here, the heavy lifting is in iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    /* The offset comes first in the instruction stream, then the selector. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
6468
6469
/** Opcode 0x9b. (aka fwait)
 * Checks for pending x87 exceptions / device-not-available before continuing;
 * otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6481
6482
/**
 * @opcode 0x9c
 * pushf - defers to iemCImpl_pushf (privilege/VM/VME checks live there);
 * default operand size is 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
6494
6495
/**
 * @opcode 0x9d
 * popf - defers to iemCImpl_popf; modifies rFLAGS so interrupt delivery must
 * be (re)checked before and after, and rSP is clobbered.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_MNEMONIC(popf_Fv, "popf Fv");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP),
                                iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
6508
6509
/**
 * @opcode 0x9e
 * sahf - store AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF only, with the
 * always-one reserved bit 1 forced set). In 64-bit mode this is \#UD unless
 * the CPU reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    /* Without REX, greg index 4 addresses AH. */
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper flag bits untouched */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);           /* reserved bit 1 always reads as one */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6533
6534
/**
 * @opcode 0x9f
 * lahf - load the low byte of EFLAGS into AH. In 64-bit mode this is \#UD
 * unless the CPU reports LAHF/SAHF support.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    if (   IEM_IS_64BIT_CODE(pVCpu)
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* Without REX, greg index 4 addresses AH. */
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6552
6553
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode.
 * The immediate width follows the effective address size (16/32/64 bits),
 * zero-extended to 64 bits. Will return/throw on failures.
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } while (0)
6577
/**
 * @opcode 0xa0
 * mov AL,Ob - load AL from the absolute moffs8 address in the effective
 * segment.
 */
FNIEMOP_DEF(iemOp_mov_AL_Ob)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6602
6603
/**
 * @opcode 0xa1
 * mov rAX,Ov - load AX/EAX/RAX from the absolute moffs address in the
 * effective segment, per the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6657
6658
/**
 * @opcode 0xa2
 * mov Ob,AL - store AL to the absolute moffs8 address in the effective
 * segment.
 */
FNIEMOP_DEF(iemOp_mov_Ob_AL)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store AL.
     */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint8_t, u8Tmp);
    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
    IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
6683
6684
/**
 * @opcode 0xa3
 * mov Ov,rAX - store AX/EAX/RAX to the absolute moffs address in the
 * effective segment, per the effective operand size.
 */
FNIEMOP_DEF(iemOp_mov_Ov_rAX)
{
    /*
     * Get the offset.
     */
    IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
    RTGCPTR GCPtrMemOffDecode;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOffDecode);

    /*
     * Store rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
            IEM_MC_LOCAL_CONST(RTGCPTR, GCPtrMemOff, GCPtrMemOffDecode);
            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6738
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Expands one non-REP MOVS body: load from iEffSeg:[rSI], store to ES:[rDI],
 * then advance (or retreat, when EFLAGS.DF is set) both index registers by
 * ValBits/8 using AddrBits-sized arithmetic. */
#define IEM_MOVS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6758
/**
 * @opcode 0xa4
 * movsb - byte string move. The REP-prefixed forms defer to address-size
 * specific C implementations; the plain form uses the shared IEM_MOVS_CASE
 * microcode body.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    /* Note: F2 (REPNZ) is treated like F3 (REP) here, matching hardware. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT:
                IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                            RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                            | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                            iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32, IEM_MC_F_MIN_386); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64, IEM_MC_F_64BIT); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6807
6808
/**
 * @opcode 0xa5
 * movsw/movsd/movsq - word/dword/qword string move. REP-prefixed forms defer
 * to op-size x addr-size specific C implementations; the plain forms use the
 * shared IEM_MOVS_CASE microcode body.
 */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    /* Note: F2 (REPNZ) is treated like F3 (REP) here, matching hardware. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached: all inner cases return */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: every inner case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* op64/addr16 cannot be encoded */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6932
6933#undef IEM_MOVS_CASE
6934
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the MC block for a single (non-REP) CMPS iteration:
 *   - fetches a ValBits-wide operand from iEffSeg:[xSI] and one from ES:[xDI],
 *   - runs the compare helper (flags only, no memory write-back),
 *   - then advances or rewinds xSI/xDI by ValBits/8 according to EFLAGS.DF.
 * AddrBits selects how xSI/xDI are zero-extended for address calculation;
 * a_fMcFlags carries the IEM_MC_F_XXX CPU/mode restrictions for the block.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 3, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    \
    /* Load first operand from iEffSeg:[xSI] (segment override applies). */ \
    IEM_MC_LOCAL(RTGCPTR, uAddr1); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr1, X86_GREG_xSI); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr1); \
    \
    /* Load second operand from ES:[xDI] (ES is not overridable here). */ \
    IEM_MC_LOCAL(RTGCPTR, uAddr2); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr2, X86_GREG_xDI); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr2); \
    \
    /* Compare; only EFLAGS are updated, uValue1 is passed by reference
       because the cmp assembly helper takes a pointer first operand. */ \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_ARG_LOCAL_REF(uint##ValBits##_t *, puValue1, uValue1, 0); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    /* Step both index registers up or down depending on the direction flag. */ \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
6964
6965/**
6966 * @opcode 0xa6
6967 */
6968FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
6969{
6970
6971 /*
6972 * Use the C implementation if a repeat prefix is encountered.
6973 */
6974 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
6975 {
6976 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
6977 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6978 switch (pVCpu->iem.s.enmEffAddrMode)
6979 {
6980 case IEMMODE_16BIT:
6981 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6982 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6983 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6984 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6985 iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
6986 case IEMMODE_32BIT:
6987 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6988 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6989 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6990 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6991 iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
6992 case IEMMODE_64BIT:
6993 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
6994 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
6995 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
6996 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
6997 iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
6998 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6999 }
7000 }
7001 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7002 {
7003 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
7004 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7005 switch (pVCpu->iem.s.enmEffAddrMode)
7006 {
7007 case IEMMODE_16BIT:
7008 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7009 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7010 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7011 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7012 iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
7013 case IEMMODE_32BIT:
7014 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7015 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7016 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7017 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7018 iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
7019 case IEMMODE_64BIT:
7020 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7021 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7022 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7023 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7024 iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
7025 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7026 }
7027 }
7028
7029 /*
7030 * Sharing case implementation with cmps[wdq] below.
7031 */
7032 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
7033 switch (pVCpu->iem.s.enmEffAddrMode)
7034 {
7035 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7036 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7037 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64, IEM_MC_F_64BIT); break;
7038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7039 }
7040}
7041
7042
7043/**
7044 * @opcode 0xa7
7045 */
7046FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
7047{
7048 /*
7049 * Use the C implementation if a repeat prefix is encountered.
7050 */
7051 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7052 {
7053 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
7054 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7055 switch (pVCpu->iem.s.enmEffOpSize)
7056 {
7057 case IEMMODE_16BIT:
7058 switch (pVCpu->iem.s.enmEffAddrMode)
7059 {
7060 case IEMMODE_16BIT:
7061 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7062 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7063 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7064 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7065 iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7066 case IEMMODE_32BIT:
7067 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7068 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7069 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7070 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7071 iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7072 case IEMMODE_64BIT:
7073 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7074 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7075 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7076 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7077 iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7078 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7079 }
7080 break;
7081 case IEMMODE_32BIT:
7082 switch (pVCpu->iem.s.enmEffAddrMode)
7083 {
7084 case IEMMODE_16BIT:
7085 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7086 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7087 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7088 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7089 iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7090 case IEMMODE_32BIT:
7091 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7092 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7093 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7094 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7095 iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7096 case IEMMODE_64BIT:
7097 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7098 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7099 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7100 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7101 iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7103 }
7104 case IEMMODE_64BIT:
7105 switch (pVCpu->iem.s.enmEffAddrMode)
7106 {
7107 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
7108 case IEMMODE_32BIT:
7109 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7110 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7111 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7112 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7113 iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7114 case IEMMODE_64BIT:
7115 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7116 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7117 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7118 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7119 iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7120 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7121 }
7122 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7123 }
7124 }
7125
7126 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7127 {
7128 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
7129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7130 switch (pVCpu->iem.s.enmEffOpSize)
7131 {
7132 case IEMMODE_16BIT:
7133 switch (pVCpu->iem.s.enmEffAddrMode)
7134 {
7135 case IEMMODE_16BIT:
7136 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7137 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7138 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7139 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7140 iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
7141 case IEMMODE_32BIT:
7142 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7143 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7144 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7145 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7146 iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
7147 case IEMMODE_64BIT:
7148 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7149 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7150 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7151 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7152 iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
7153 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7154 }
7155 break;
7156 case IEMMODE_32BIT:
7157 switch (pVCpu->iem.s.enmEffAddrMode)
7158 {
7159 case IEMMODE_16BIT:
7160 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7161 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7162 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7163 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7164 iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
7165 case IEMMODE_32BIT:
7166 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7167 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7168 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7169 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7170 iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
7171 case IEMMODE_64BIT:
7172 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7173 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7174 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7175 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7176 iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
7177 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7178 }
7179 case IEMMODE_64BIT:
7180 switch (pVCpu->iem.s.enmEffAddrMode)
7181 {
7182 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
7183 case IEMMODE_32BIT:
7184 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7185 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7186 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7187 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7188 iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
7189 case IEMMODE_64BIT:
7190 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7191 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7192 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7193 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7194 iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
7195 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7196 }
7197 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7198 }
7199 }
7200
7201 /*
7202 * Annoying double switch here.
7203 * Using ugly macro for implementing the cases, sharing it with cmpsb.
7204 */
7205 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
7206 switch (pVCpu->iem.s.enmEffOpSize)
7207 {
7208 case IEMMODE_16BIT:
7209 switch (pVCpu->iem.s.enmEffAddrMode)
7210 {
7211 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7212 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7213 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64, IEM_MC_F_64BIT); break;
7214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7215 }
7216 break;
7217
7218 case IEMMODE_32BIT:
7219 switch (pVCpu->iem.s.enmEffAddrMode)
7220 {
7221 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7222 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7223 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64, IEM_MC_F_64BIT); break;
7224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7225 }
7226 break;
7227
7228 case IEMMODE_64BIT:
7229 switch (pVCpu->iem.s.enmEffAddrMode)
7230 {
7231 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7232 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32, IEM_MC_F_MIN_386); break;
7233 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64, IEM_MC_F_64BIT); break;
7234 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7235 }
7236 break;
7237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7238 }
7239}
7240
7241#undef IEM_CMPS_CASE
7242
7243/**
7244 * @opcode 0xa8
7245 */
7246FNIEMOP_DEF(iemOp_test_AL_Ib)
7247{
7248 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
7249 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7250 IEMOP_BODY_BINARY_AL_Ib(iemAImpl_test_u8);
7251}
7252
7253
7254/**
7255 * @opcode 0xa9
7256 */
7257FNIEMOP_DEF(iemOp_test_eAX_Iz)
7258{
7259 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
7260 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
7261 IEMOP_BODY_BINARY_rAX_Iz(iemAImpl_test_u16, iemAImpl_test_u32, iemAImpl_test_u64, 0);
7262}
7263
7264
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the MC block for a single (non-REP) STOS iteration: stores the
 * ValBits-wide AL/AX/EAX/RAX value to ES:[xDI] and then steps xDI by
 * ValBits/8 according to EFLAGS.DF.  AddrBits selects the zero-extension
 * of xDI for address calculation; a_fMcFlags carries IEM_MC_F_XXX
 * CPU/mode restrictions.
 */
#define IEM_STOS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7281
7282/**
7283 * @opcode 0xaa
7284 */
7285FNIEMOP_DEF(iemOp_stosb_Yb_AL)
7286{
7287 /*
7288 * Use the C implementation if a repeat prefix is encountered.
7289 */
7290 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7291 {
7292 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
7293 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7294 switch (pVCpu->iem.s.enmEffAddrMode)
7295 {
7296 case IEMMODE_16BIT:
7297 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7298 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7299 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7300 iemCImpl_stos_al_m16);
7301 case IEMMODE_32BIT:
7302 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7303 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7304 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7305 iemCImpl_stos_al_m32);
7306 case IEMMODE_64BIT:
7307 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP,
7308 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7309 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7310 iemCImpl_stos_al_m64);
7311 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7312 }
7313 }
7314
7315 /*
7316 * Sharing case implementation with stos[wdq] below.
7317 */
7318 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
7319 switch (pVCpu->iem.s.enmEffAddrMode)
7320 {
7321 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7322 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7323 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64, IEM_MC_F_64BIT); break;
7324 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7325 }
7326}
7327
7328
7329/**
7330 * @opcode 0xab
7331 */
7332FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
7333{
7334 /*
7335 * Use the C implementation if a repeat prefix is encountered.
7336 */
7337 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7338 {
7339 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
7340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7341 switch (pVCpu->iem.s.enmEffOpSize)
7342 {
7343 case IEMMODE_16BIT:
7344 switch (pVCpu->iem.s.enmEffAddrMode)
7345 {
7346 case IEMMODE_16BIT:
7347 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7348 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7349 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7350 iemCImpl_stos_ax_m16);
7351 case IEMMODE_32BIT:
7352 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7353 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7354 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7355 iemCImpl_stos_ax_m32);
7356 case IEMMODE_64BIT:
7357 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7358 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7359 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7360 iemCImpl_stos_ax_m64);
7361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7362 }
7363 break;
7364 case IEMMODE_32BIT:
7365 switch (pVCpu->iem.s.enmEffAddrMode)
7366 {
7367 case IEMMODE_16BIT:
7368 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7369 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7370 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7371 iemCImpl_stos_eax_m16);
7372 case IEMMODE_32BIT:
7373 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7374 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7375 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7376 iemCImpl_stos_eax_m32);
7377 case IEMMODE_64BIT:
7378 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7379 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7380 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7381 iemCImpl_stos_eax_m64);
7382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7383 }
7384 case IEMMODE_64BIT:
7385 switch (pVCpu->iem.s.enmEffAddrMode)
7386 {
7387 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
7388 case IEMMODE_32BIT:
7389 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7390 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7391 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7392 iemCImpl_stos_rax_m32);
7393 case IEMMODE_64BIT:
7394 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_REP,
7395 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7396 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7397 iemCImpl_stos_rax_m64);
7398 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7399 }
7400 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7401 }
7402 }
7403
7404 /*
7405 * Annoying double switch here.
7406 * Using ugly macro for implementing the cases, sharing it with stosb.
7407 */
7408 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
7409 switch (pVCpu->iem.s.enmEffOpSize)
7410 {
7411 case IEMMODE_16BIT:
7412 switch (pVCpu->iem.s.enmEffAddrMode)
7413 {
7414 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7415 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7416 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64, IEM_MC_F_64BIT); break;
7417 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7418 }
7419 break;
7420
7421 case IEMMODE_32BIT:
7422 switch (pVCpu->iem.s.enmEffAddrMode)
7423 {
7424 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7425 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7426 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64, IEM_MC_F_64BIT); break;
7427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7428 }
7429 break;
7430
7431 case IEMMODE_64BIT:
7432 switch (pVCpu->iem.s.enmEffAddrMode)
7433 {
7434 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7435 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32, IEM_MC_F_64BIT); break;
7436 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64, IEM_MC_F_64BIT); break;
7437 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7438 }
7439 break;
7440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7441 }
7442}
7443
7444#undef IEM_STOS_CASE
7445
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the MC block for a single (non-REP) LODS iteration: loads a
 * ValBits-wide value from iEffSeg:[xSI] into AL/AX/EAX/RAX and then steps
 * xSI by ValBits/8 according to EFLAGS.DF.  AddrBits selects the
 * zero-extension of xSI for address calculation; a_fMcFlags carries
 * IEM_MC_F_XXX CPU/mode restrictions.
 */
#define IEM_LODS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(0, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END() \
7462
7463/**
7464 * @opcode 0xac
7465 */
7466FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
7467{
7468 /*
7469 * Use the C implementation if a repeat prefix is encountered.
7470 */
7471 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7472 {
7473 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
7474 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7475 switch (pVCpu->iem.s.enmEffAddrMode)
7476 {
7477 case IEMMODE_16BIT:
7478 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7479 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7480 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7481 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7482 iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
7483 case IEMMODE_32BIT:
7484 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7485 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7486 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7487 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7488 iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
7489 case IEMMODE_64BIT:
7490 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7491 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7492 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7493 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7494 iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
7495 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7496 }
7497 }
7498
7499 /*
7500 * Sharing case implementation with stos[wdq] below.
7501 */
7502 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
7503 switch (pVCpu->iem.s.enmEffAddrMode)
7504 {
7505 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7506 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7507 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64, IEM_MC_F_64BIT); break;
7508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7509 }
7510}
7511
7512
7513/**
7514 * @opcode 0xad
7515 */
7516FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
7517{
7518 /*
7519 * Use the C implementation if a repeat prefix is encountered.
7520 */
7521 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
7522 {
7523 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
7524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7525 switch (pVCpu->iem.s.enmEffOpSize)
7526 {
7527 case IEMMODE_16BIT:
7528 switch (pVCpu->iem.s.enmEffAddrMode)
7529 {
7530 case IEMMODE_16BIT:
7531 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7532 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7533 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7534 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7535 iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
7536 case IEMMODE_32BIT:
7537 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7538 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7539 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7540 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7541 iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
7542 case IEMMODE_64BIT:
7543 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7544 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7545 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7546 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7547 iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
7548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7549 }
7550 break;
7551 case IEMMODE_32BIT:
7552 switch (pVCpu->iem.s.enmEffAddrMode)
7553 {
7554 case IEMMODE_16BIT:
7555 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7556 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7557 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7558 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7559 iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
7560 case IEMMODE_32BIT:
7561 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7562 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7563 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7564 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7565 iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
7566 case IEMMODE_64BIT:
7567 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7568 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7569 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7570 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7571 iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
7572 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7573 }
7574 case IEMMODE_64BIT:
7575 switch (pVCpu->iem.s.enmEffAddrMode)
7576 {
7577 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
7578 case IEMMODE_32BIT:
7579 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7580 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7581 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7582 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7583 iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
7584 case IEMMODE_64BIT:
7585 IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_REP,
7586 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX)
7587 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
7588 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7589 iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
7590 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7591 }
7592 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7593 }
7594 }
7595
7596 /*
7597 * Annoying double switch here.
7598 * Using ugly macro for implementing the cases, sharing it with lodsb.
7599 */
7600 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
7601 switch (pVCpu->iem.s.enmEffOpSize)
7602 {
7603 case IEMMODE_16BIT:
7604 switch (pVCpu->iem.s.enmEffAddrMode)
7605 {
7606 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
7607 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32, IEM_MC_F_MIN_386); break;
7608 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64, IEM_MC_F_64BIT); break;
7609 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7610 }
7611 break;
7612
7613 case IEMMODE_32BIT:
7614 switch (pVCpu->iem.s.enmEffAddrMode)
7615 {
7616 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
7617 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32, IEM_MC_F_MIN_386); break;
7618 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64, IEM_MC_F_64BIT); break;
7619 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7620 }
7621 break;
7622
7623 case IEMMODE_64BIT:
7624 switch (pVCpu->iem.s.enmEffAddrMode)
7625 {
7626 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
7627 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32, IEM_MC_F_64BIT); break;
7628 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64, IEM_MC_F_64BIT); break;
7629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7630 }
7631 break;
7632 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7633 }
7634}
7635
7636#undef IEM_LODS_CASE
7637
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the MC block for a single (non-REP) SCAS iteration: compares
 * AL/AX/EAX/RAX against the ValBits-wide value at ES:[xDI] (flags only,
 * no write-back) and then steps xDI by ValBits/8 according to EFLAGS.DF.
 * AddrBits selects the zero-extension of xDI for address calculation;
 * a_fMcFlags carries IEM_MC_F_XXX CPU/mode restrictions.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits, a_fMcFlags) \
    IEM_MC_BEGIN(3, 2, a_fMcFlags, 0); \
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
    IEM_MC_END();
7660
7661/**
7662 * @opcode 0xae
7663 */
7664FNIEMOP_DEF(iemOp_scasb_AL_Xb)
7665{
7666 /*
7667 * Use the C implementation if a repeat prefix is encountered.
7668 */
7669 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
7670 {
7671 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
7672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7673 switch (pVCpu->iem.s.enmEffAddrMode)
7674 {
7675 case IEMMODE_16BIT:
7676 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7677 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7678 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7679 iemCImpl_repe_scas_al_m16);
7680 case IEMMODE_32BIT:
7681 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7682 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7683 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7684 iemCImpl_repe_scas_al_m32);
7685 case IEMMODE_64BIT:
7686 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7687 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7688 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7689 iemCImpl_repe_scas_al_m64);
7690 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7691 }
7692 }
7693 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
7694 {
7695 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
7696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7697 switch (pVCpu->iem.s.enmEffAddrMode)
7698 {
7699 case IEMMODE_16BIT:
7700 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7701 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7702 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7703 iemCImpl_repne_scas_al_m16);
7704 case IEMMODE_32BIT:
7705 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7706 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7707 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7708 iemCImpl_repne_scas_al_m32);
7709 case IEMMODE_64BIT:
7710 IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
7711 RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
7712 | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
7713 iemCImpl_repne_scas_al_m64);
7714 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7715 }
7716 }
7717
7718 /*
7719 * Sharing case implementation with stos[wdq] below.
7720 */
7721 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
7722 switch (pVCpu->iem.s.enmEffAddrMode)
7723 {
7724 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16, IEM_MC_F_NOT_64BIT); break;
7725 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32, IEM_MC_F_MIN_386); break;
7726 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64, IEM_MC_F_64BIT); break;
7727 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7728 }
7729}
7730
7731
7732/**
7733 * @opcode 0xaf
7734 */
FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
{
    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Select the C helper by effective operand size x effective address size.
           Every case returns via IEM_MC_DEFER_TO_CIMPL_0_RET; the register mask
           declares that the helper may modify rDI (scan pointer) and rCX (repeat
           counter). */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable: all inner cases return */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: all inner cases return */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? 64-bit mode allows 32-bit addressing via 0x67, but not 16-bit, so asserting here looks right - verify. */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repe_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Same structure as the REPE branch above, but with the repne helpers. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_ax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reachable: all inner cases return */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m16);
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_eax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed: all inner cases return */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5); /* 16-bit addressing is not encodable in 64-bit mode. */
                    case IEMMODE_32BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m32);
                    case IEMMODE_64BIT:
                        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
                                                    RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                                    | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                                    iemCImpl_repne_scas_rax_m64);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with scasb.
     */
    IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16, IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT); break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32, IEM_MC_F_MIN_386); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32, IEM_MC_F_64BIT); break;
                case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64, IEM_MC_F_64BIT); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7912
7913#undef IEM_SCAS_CASE
7914
7915/**
7916 * Common 'mov r8, imm8' helper.
7917 */
7918FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iFixedReg)
7919{
7920 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
7921 IEM_MC_BEGIN(0, 0, 0, 0);
7922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7923 IEM_MC_STORE_GREG_U8_CONST(iFixedReg, u8Imm);
7924 IEM_MC_ADVANCE_RIP_AND_FINISH();
7925 IEM_MC_END();
7926}
7927
7928
7929/**
7930 * @opcode 0xb0
7931 */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    /* Register 0: AL, or R8B when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
7937
7938
7939/**
7940 * @opcode 0xb1
7941 */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    /* Register 1: CL, or R9B when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
7947
7948
7949/**
7950 * @opcode 0xb2
7951 */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    /* Register 2: DL, or R10B when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
7957
7958
7959/**
7960 * @opcode 0xb3
7961 */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    /* Register 3: BL, or R11B when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
7967
7968
7969/**
7970 * @opcode 0xb4
7971 */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    /* Encoded register 4: AH without a REX prefix, SPL/R12B with one —
       presumably disambiguated by the U8 store helper; uRexB is ORed in here. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
7977
7978
7979/**
7980 * @opcode 0xb5
7981 */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    /* Encoded register 5: CH without a REX prefix, BPL/R13B with one. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
7987
7988
7989/**
7990 * @opcode 0xb6
7991 */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    /* Encoded register 6: DH without a REX prefix, SIL/R14B with one. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
7997
7998
7999/**
8000 * @opcode 0xb7
8001 */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    /* Encoded register 7: BH without a REX prefix, DIL/R15B with one. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8007
8008
8009/**
8010 * Common 'mov regX,immX' helper.
8011 */
FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iFixedReg)
{
    /* Worker shared by the 0xb8..0xbf 'mov regX,immX' opcodes: fetch an
       immediate of the effective operand size and store it into the
       caller-selected fixed register. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U16_CONST(iFixedReg, u16Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U32_CONST(iFixedReg, u32Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            /* NOTE(review): local count '1' differs from the '0' used by the
               16/32-bit cases above — confirm this is intended. */
            IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64_CONST(iFixedReg, u64Imm);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8045
8046
8047/**
8048 * @opcode 0xb8
8049 */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    /* Register 0: rAX, or r8 when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
8055
8056
8057/**
8058 * @opcode 0xb9
8059 */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    /* Register 1: rCX, or r9 when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
8065
8066
8067/**
8068 * @opcode 0xba
8069 */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    /* Register 2: rDX, or r10 when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
8075
8076
8077/**
8078 * @opcode 0xbb
8079 */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    /* Register 3: rBX, or r11 when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
8085
8086
8087/**
8088 * @opcode 0xbc
8089 */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    /* Register 4: rSP, or r12 when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
8095
8096
8097/**
8098 * @opcode 0xbd
8099 */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    /* Register 5: rBP, or r13 when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
8105
8106
8107/**
8108 * @opcode 0xbe
8109 */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    /* Register 6: rSI, or r14 when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
8115
8116
8117/**
8118 * @opcode 0xbf
8119 */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    /* Register 7: rDI, or r15 when REX.B is set. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
8125
8126
8127/**
8128 * @opcode 0xc0
8129 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    /* Group 2: rotate/shift r/m8 by an imm8 count; the ModR/M reg field
       selects the operation. /6 is undefined and raises #UD. */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *,      pEFlags,           2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: the effective address must be calculated before the
           immediate byte is fetched (1 = trailing imm8). */
        IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_186, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8189
8190
8191/**
8192 * @opcode 0xc1
8193 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    /* Group 2: rotate/shift r/m16/32/64 by an imm8 count; the ModR/M reg
       field selects the operation. /6 is undefined and raises #UD. */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_186, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: effective address first (1 = trailing imm8), then the
           shift count byte, then map/modify/commit the operand. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);

                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,   2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8340
8341
8342/**
8343 * @opcode 0xc2
8344 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    /* Near return, popping an additional imm16 bytes of stack parameters. */
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_16, u16Imm);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_32, u16Imm);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_iw_64, u16Imm);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8362
8363
8364/**
8365 * @opcode 0xc3
8366 */
FNIEMOP_DEF(iemOp_retn)
{
    /* Plain near return. */
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_16);
        case IEMMODE_32BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_32);
        case IEMMODE_64BIT:
            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_retn_64);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
8383
8384
8385/**
8386 * @opcode 0xc4
8387 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode.
       (NOTE(review): this comment previously said "LDS"; 0xc4 is LES.) */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if ((bVex2 & 0x80 /* VEX.W */) && IEM_IS_64BIT_CODE(pVCpu))
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* Extract the inverted REX bits and VEX payload fields from the
               two VEX payload bytes. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;

            /* VEX.mmmmm selects the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            }
        }
        Log(("VEX3: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
8454
8455
8456/**
8457 * @opcode 0xc5
8458 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid in 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form.
       (NOTE(review): this comment previously said "LES"; 0xc5 is LDS.) */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if (   IEM_IS_64BIT_CODE(pVCpu)
        || IEM_IS_MODRM_REG_MODE(bRm))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVex)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
                     the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* The single VEX payload byte holds inverted REX.R, vvvv, L and pp. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bRm & 0x3;

#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: VEX support disabled!\n"));
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }

    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
8499
8500
8501/**
8502 * @opcode 0xc6
8503 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    /* Group 11: only /0 (mov Eb,Ib) is valid here; everything else is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(0, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_GREG_U8_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access: effective address first (1 = trailing imm8), then
           the immediate byte. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8534
8535
8536/**
8537 * @opcode 0xc7
8538 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    /* Group 11: only /0 (mov Ev,Iz) is valid here; everything else is #UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        IEMOP_RAISE_INVALID_OPCODE_RET();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                /* 64-bit uses a sign-extended imm32, not a full imm64. */
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(IEM_GET_MODRM_RM(pVCpu, bRm), u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access: effective address first (2/4 = size of the trailing
           immediate), then the immediate. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8623
8624
8625
8626
8627/**
8628 * @opcode 0xc8
8629 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    /* ENTER: create a stack frame of cbFrame bytes with u8NestingLevel
       nesting levels; the C helper may modify rSP and rBP (see mask). */
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
8643
8644
8645/**
8646 * @opcode 0xc9
8647 */
FNIEMOP_DEF(iemOp_leave)
{
    /* LEAVE: tear down the current stack frame; the C helper may modify
       rSP and rBP (see mask). */
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(0,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSP)
                                | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xBP),
                                iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
8659
8660
8661/**
8662 * @opcode 0xca
8663 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    /* Far return, popping an additional imm16 bytes of stack parameters.
       The mask lists DS/ES/FS/GS selector/base/limit as potentially
       modified — presumably because a far return can change CPL, which may
       force those segments to be invalidated; verify against iemCImpl_retf. */
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
8685
8686
8687/**
8688 * @opcode 0xcb
8689 */
FNIEMOP_DEF(iemOp_retf)
{
    /* Plain far return — same as 0xca (retf Iw) with an immediate of 0. */
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK
                                | IEM_CIMPL_F_MODE,
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
                                iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
8710
8711
/**
 * @opcode 0xcc
 *
 * INT3 - breakpoint, dispatched as software interrupt vector 3 (#BP)
 * via iemCImpl_int.  The IEMINT_INT3 enum value tells the C
 * implementation which INT flavour this is.  NOTE(review): unlike
 * int Ib below, this passes IEM_CIMPL_F_END_TB and an empty (0) flush
 * mask - presumably because the TB is terminated anyway; confirm
 * against the defer-macro semantics.
 */
FNIEMOP_DEF(iemOp_int3)
{
    IEMOP_MNEMONIC(int3, "int3");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
}
8723
8724
/**
 * @opcode 0xcd
 *
 * INT Ib - software interrupt with an immediate vector number,
 * dispatched via iemCImpl_int (IEMINT_INTN flavour).  An interrupt gate
 * may land anywhere and switch stacks/modes, so all guest register
 * shadows are flushed (UINT64_MAX mask).
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS, UINT64_MAX,
                                iemCImpl_int, u8Int, IEMINT_INTN);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8738
8739
/**
 * @opcode 0xce
 *
 * INTO - raise #OF (vector 4) if the overflow flag is set; a no-op
 * otherwise, which is why IEM_CIMPL_F_BRANCH_CONDITIONAL is included.
 * Invalid in 64-bit mode (IEMOP_HLP_NO_64BIT raises #UD there).
 * Dispatched via iemCImpl_int with the IEMINT_INTO flavour; all guest
 * register shadows are flushed (UINT64_MAX) since the gate may switch
 * stack and mode.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_BRANCH_CONDITIONAL | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                UINT64_MAX,
                                iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
8753
8754
/**
 * @opcode 0xcf
 *
 * IRET - interrupt return.  Deferred to iemCImpl_iret; can change mode,
 * privilege level and RFLAGS, and must honour pending IRQs before the
 * next instruction (IEM_CIMPL_F_CHECK_IRQ_BEFORE).
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_VMEXIT,
                                RT_BIT_64(kIemNativeGstReg_SegSelFirst     + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_DS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_ES)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_FS)
                                | RT_BIT_64(kIemNativeGstReg_SegSelFirst   + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegBaseFirst  + X86_SREG_GS)
                                | RT_BIT_64(kIemNativeGstReg_SegLimitFirst + X86_SREG_GS),
                                iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
    /* Segment registers are sanitized when returning to an outer ring, or fully
       reloaded when returning to v86 mode.  Thus the large flush list above. */
}
8780
8781
/**
 * @opcode 0xd0
 *
 * Group 2 (rotate/shift) on a byte operand with an implicit count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  The /reg field of the ModR/M byte
 * selects the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    /* Pick the assembly worker table by the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are not compared by the verifier for these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint8_t *,   pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,  pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory: map destination read-write, shift in place, commit. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
        IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
8839
8840
8841
/**
 * @opcode 0xd1
 *
 * Group 2 (rotate/shift) on a word/dword/qword operand with an implicit
 * count of 1: rol/ror/rcl/rcr/shl/shr/sar Ev,1.  The /reg field selects
 * the operation (/6 raises \#UD); the effective operand size selects the
 * 16/32/64-bit worker from the PCIEMOPSHIFTSIZES table.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    /* Pick the assembly worker table by the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are not compared by the verifier for these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,  pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,  pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,  pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map destination read-write, shift in place, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8980
8981
8982/**
8983 * @opcode 0xd2
8984 */
8985FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
8986{
8987 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8988 PCIEMOPSHIFTSIZES pImpl;
8989 switch (IEM_GET_MODRM_REG_8(bRm))
8990 {
8991 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
8992 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
8993 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
8994 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
8995 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
8996 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
8997 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
8998 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
8999 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
9000 }
9001 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
9002
9003 if (IEM_IS_MODRM_REG_MODE(bRm))
9004 {
9005 /* register */
9006 IEM_MC_BEGIN(3, 0, 0, 0);
9007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9008 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9009 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9010 IEM_MC_ARG(uint32_t *, pEFlags, 2);
9011 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9012 IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
9013 IEM_MC_REF_EFLAGS(pEFlags);
9014 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9015 IEM_MC_ADVANCE_RIP_AND_FINISH();
9016 IEM_MC_END();
9017 }
9018 else
9019 {
9020 /* memory */
9021 IEM_MC_BEGIN(3, 3, 0, 0);
9022 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9023 IEM_MC_ARG(uint8_t, cShiftArg, 1);
9024 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
9025 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9026 IEM_MC_LOCAL(uint8_t, bUnmapInfo);
9027
9028 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9030 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
9031 IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9032 IEM_MC_FETCH_EFLAGS(EFlags);
9033 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
9034
9035 IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
9036 IEM_MC_COMMIT_EFLAGS(EFlags);
9037 IEM_MC_ADVANCE_RIP_AND_FINISH();
9038 IEM_MC_END();
9039 }
9040}
9041
9042
/**
 * @opcode 0xd3
 *
 * Group 2 (rotate/shift) on a word/dword/qword operand with the count
 * taken from CL: rol/ror/rcl/rcr/shl/shr/sar Ev,CL.  The /reg field
 * selects the operation (/6 raises \#UD); the effective operand size
 * selects the 16/32/64-bit worker from the PCIEMOPSHIFTSIZES table.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /* Pick the assembly worker table by the ModR/M reg field. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: IEMOP_RAISE_INVALID_OPCODE_RET(); /* /6 is not assigned in group 2. */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are not compared by the verifier for these ops. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the upper half in 64-bit mode. */
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map destination read-write, shift in place, commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);
                IEM_MC_LOCAL(uint8_t,  bUnmapInfo);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* count comes from CL */
                IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9187
/**
 * @opcode 0xd4
 *
 * AAM Ib - ASCII adjust AX after multiply (the immediate is the divisor,
 * normally 10).  Invalid in 64-bit mode; an immediate of zero raises
 * \#DE before any work is done.  Deferred to iemCImpl_aam, which updates
 * xAX (hence the flush mask) and the status flags.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm) /* division by zero is checked at decode time. */
        IEMOP_RAISE_DIVIDE_ERROR_RET();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aam, bImm);
}
9201
9202
/**
 * @opcode 0xd5
 *
 * AAD Ib - ASCII adjust AX before division (the immediate is the
 * multiplier, normally 10).  Invalid in 64-bit mode.  Deferred to
 * iemCImpl_aad, which updates xAX (hence the flush mask) and the
 * status flags; no \#DE check is needed for this variant.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_STATUS_FLAGS, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX), iemCImpl_aad, bImm);
}
9214
9215
/**
 * @opcode 0xd6
 *
 * SALC - set AL from carry: AL = CF ? 0xff : 0x00.  An undocumented
 * instruction; invalid in 64-bit mode (IEMOP_HLP_NO_64BIT raises \#UD).
 * Implemented inline as a simple EFLAGS-conditional byte store.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9234
9235
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup: AL = [seg:e/rBX + zero-extended AL].  One body per
 * effective address size, since the BX addition and memory fetch use the
 * corresponding address width (16-bit wraps within the segment via the
 * MEM16/MEM32 fetch variants).
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0, 0, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* + BX */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp); /* result goes back into AL */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX); /* + EBX */
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp); /* result goes back into AL */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_LOCAL(uint8_t,  u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX); /* + RBX */
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp); /* result goes back into AL */
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9286
9287
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM/\#MF as appropriate, then - if both ST0 and ST(i) are
 * non-empty - invokes the assembly worker and stores the IEMFPURESULT in
 * ST0; otherwise records a stack underflow targeting ST0.
 *
 * @param   bRm         Mod R/M byte; the R/M field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Either register empty: flag underflow on ST0 instead of computing. */
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9317
9318
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * Same structure as iemOpHlpFpu_st0_stN, but the worker produces only a
 * new FSW value (no result register is written).  On an empty ST0/ST(i)
 * a stack underflow is recorded without a destination (UINT8_MAX).
 *
 * @param   bRm         Mod R/M byte; the R/M field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9348
9349
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Identical to iemOpHlpFpuNoStore_st0_stN except the FSW update / underflow
 * handling also pops the register stack (the *_THEN_POP variants).
 *
 * @param   bRm         Mod R/M byte; the R/M field selects ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEM_MC_BEGIN(3, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9379
9380
/** Opcode 0xd8 11/0.  FADD ST0,ST(i): ST0 = ST0 + ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
9387
9388
/** Opcode 0xd8 11/1.  FMUL ST0,ST(i): ST0 = ST0 * ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
9395
9396
/** Opcode 0xd8 11/2.  FCOM ST0,ST(i): compare, updating only the FSW. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
9403
9404
/** Opcode 0xd8 11/3.  FCOMP ST0,ST(i): like FCOM but pops ST0 afterwards
 *  (same assembly worker, popping helper). */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
9411
9412
/** Opcode 0xd8 11/4.  FSUB ST0,ST(i): ST0 = ST0 - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
9419
9420
/** Opcode 0xd8 11/5.  FSUBR ST0,ST(i): reversed subtract, ST0 = ST(i) - ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
9427
9428
/** Opcode 0xd8 11/6.  FDIV ST0,ST(i): ST0 = ST0 / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
9435
9436
/** Opcode 0xd8 11/7.  FDIVR ST0,ST(i): reversed divide, ST0 = ST(i) / ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
9443
9444
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * Calculates the effective address, fetches the 32-bit real operand from
 * memory after the \#NM/\#MF checks, then - if ST0 is non-empty - invokes
 * the assembly worker and stores the result in ST0; otherwise records a
 * stack underflow targeting ST0.
 *
 * @param   bRm         Mod R/M byte (memory form; encodes the m32r operand).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9480
9481
/** Opcode 0xd8 !11/0.  FADD ST0,m32real: ST0 = ST0 + m32r. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
9488
9489
/** Opcode 0xd8 !11/1.  FMUL ST0,m32real: ST0 = ST0 * m32r. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
9496
9497
/** Opcode 0xd8 !11/2.
 *
 * FCOM ST0,m32real: compare ST0 with a 32-bit real from memory, updating
 * only the FSW.  Open-coded (rather than going through a helper) because
 * the FSW update / underflow macros need the memory operand's seg:offset
 * for FDP reporting.
 */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9529
9530
/** Opcode 0xd8 !11/3.
 *
 * FCOMP ST0,m32real: like FCOM m32r above, but pops ST0 when done
 * (the *_THEN_POP FSW/underflow macro variants).
 */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U,            r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U,  pr32Val2,       r32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9562
9563
/** Opcode 0xd8 !11/4.
 * FSUB st0,m32r: ST(0) -= 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
9570
9571
/** Opcode 0xd8 !11/5.
 * FSUBR st0,m32r: ST(0) = m32r - ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
9578
9579
/** Opcode 0xd8 !11/6.
 * FDIV st0,m32r: ST(0) /= 32-bit real memory operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
9586
9587
/** Opcode 0xd8 !11/7.
 * FDIVR st0,m32r: ST(0) = m32r / ST(0) (reversed operands). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
9594
9595
9596/**
9597 * @opcode 0xd8
9598 */
9599FNIEMOP_DEF(iemOp_EscF0)
9600{
9601 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9602 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
9603
9604 if (IEM_IS_MODRM_REG_MODE(bRm))
9605 {
9606 switch (IEM_GET_MODRM_REG_8(bRm))
9607 {
9608 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
9609 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
9610 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
9611 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9612 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
9613 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
9614 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
9615 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
9616 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9617 }
9618 }
9619 else
9620 {
9621 switch (IEM_GET_MODRM_REG_8(bRm))
9622 {
9623 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
9624 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
9625 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
9626 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
9627 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
9628 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
9629 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
9630 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
9631 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9632 }
9633 }
9634}
9635
9636
/** Opcode 0xd9 /0 mem32real
 * FLD m32r: convert a 32-bit real memory operand to 80-bit and push it onto
 * the FPU register stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push lands in ST(7) (the register below the current TOP); it must be
       empty or we have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r32, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9667
9668
/** Opcode 0xd9 !11/2 mem32real
 * FST m32r: store ST(0) to memory as a 32-bit real (no pop).  The target is
 * mapped for writing first; on stack underflow a negative QNaN is stored
 * instead when the invalid-operation exception is masked (FCW.IM set),
 * otherwise the mapping is rolled back. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the conversion produced. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9706
9707
/** Opcode 0xd9 !11/3
 * FSTP m32r: store ST(0) to memory as 32-bit real and pop the stack.
 * Same structure as iemOp_fst_m32r except the FSW/underflow updates use the
 * _THEN_POP variants. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_MEM_MAP_R32_WO(pr32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST(0): masked IE stores a negative QNaN, unmasked leaves the
           destination untouched. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
9745
9746
/** Opcode 0xd9 !11/4
 * FLDENV: load the x87 environment (control/status/tag words, pointers) from
 * a 14- or 28-byte memory image; the size depends on the operand size, which
 * is why the C implementation gets enmEffOpSize. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    /* Too complex for micro-ops; deferred to a C implementation. */
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
9764
9765
9766/** Opcode 0xd9 !11/5 */
9767FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
9768{
9769 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
9770 IEM_MC_BEGIN(1, 1, 0, 0);
9771 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9772 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9773
9774 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9775 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9776 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9777
9778 IEM_MC_ARG(uint16_t, u16Fsw, 0);
9779 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9780
9781 IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_FPU, 0, iemCImpl_fldcw, u16Fsw);
9782 IEM_MC_END();
9783}
9784
9785
/** Opcode 0xd9 !11/6
 * FNSTENV: store the x87 environment (14/28 bytes depending on operand size)
 * to memory without checking for pending exceptions.
 * NOTE(review): the mnemonic macro says "fstenv" while the function handles
 * the no-wait form - presumably intentional stats naming; verify. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg, /*=*/ pVCpu->iem.s.iEffSeg, 1);
    /* Deferred to a C implementation (format depends on operand size/mode). */
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
9803
9804
/** Opcode 0xd9 !11/7
 * FNSTCW: store the current x87 control word to a 16-bit memory operand,
 * without checking for pending FPU exceptions (no IEM_MC_MAYBE_RAISE_FPU_XCPT). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9821
9822
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP: FPU no-operation; still raises \#NM/\#MF checks and updates the FPU
 * opcode/instruction pointer state. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
}
9838
9839
/** Opcode 0xd9 11/0 stN
 * FLD ST(i): push a copy of register ST(i) onto the FPU stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, IEM_GET_MODRM_RM_8(bRm)) {
        /* Copy the source value and push it (FSW contribution is zero). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9864
9865
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i): exchange the contents of ST(0) and ST(i).  The underflow case
 * (either register empty) is handled by a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(2, 3, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ IEM_GET_MODRM_RM_8(bRm), 0);
    IEM_MC_ARG_CONST(uint16_t, uFpuOpcode, /*=*/ pVCpu->iem.s.uFpuOpcode, 1);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, IEM_GET_MODRM_RM_8(bRm)) {
        /* Swap: ST(i)'s old value becomes the result stored to ST(0), while
           ST(0)'s old value is written to ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(IEM_GET_MODRM_RM_8(bRm), pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_CALL_CIMPL_2(IEM_CIMPL_F_FPU, 0, iemCImpl_fxch_underflow, iStReg, uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
9894
9895
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i): copy ST(0) to ST(i) and pop.  The iDstReg == 0 form degenerates
 * to a pure pop and is special-cased below. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = IEM_GET_MODRM_RM_8(bRm);
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: just pop, updating FSW. */
        IEM_MC_BEGIN(0, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0) {
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* General case: store ST(0) into ST(i), then pop. */
        IEM_MC_BEGIN(0, 2, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ELSE() {
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg, pVCpu->iem.s.uFpuOpcode);
        } IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
9942
9943
9944/**
9945 * Common worker for FPU instructions working on ST0 and replaces it with the
9946 * result, i.e. unary operators.
9947 *
9948 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9949 */
9950FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
9951{
9952 IEM_MC_BEGIN(2, 1, 0, 0);
9953 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9954 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9955 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9956 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
9957
9958 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9959 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9960 IEM_MC_PREPARE_FPU_USAGE();
9961 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
9962 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
9963 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
9964 } IEM_MC_ELSE() {
9965 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
9966 } IEM_MC_ENDIF();
9967 IEM_MC_ADVANCE_RIP_AND_FINISH();
9968
9969 IEM_MC_END();
9970}
9971
9972
/** Opcode 0xd9 0xe0.
 * FCHS: negate the sign of ST(0). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
9979
9980
/** Opcode 0xd9 0xe1.
 * FABS: clear the sign of ST(0) (absolute value). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
9987
9988
/** Opcode 0xd9 0xe4.
 * FTST: compare ST(0) against 0.0, updating only the FSW condition codes. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_ftst_r80, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10012
10013
/** Opcode 0xd9 0xe5.
 * FXAM: classify the value in ST(0) via the FSW condition codes.  Unlike
 * most x87 ops it runs unconditionally - an empty register is itself one of
 * the classifications, so no underflow branch is needed. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    IEM_MC_BEGIN(2, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_REF_FPUREG(pr80Value, 0);
    IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fxam_r80, pu16Fsw, pr80Value);
    IEM_MC_UPDATE_FSW(u16Fsw, pVCpu->iem.s.uFpuOpcode);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10034
10035
10036/**
10037 * Common worker for FPU instructions pushing a constant onto the FPU stack.
10038 *
10039 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10040 */
10041FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
10042{
10043 IEM_MC_BEGIN(1, 1, 0, 0);
10044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10045 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10046 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10047
10048 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10049 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10050 IEM_MC_PREPARE_FPU_USAGE();
10051 IEM_MC_IF_FPUREG_IS_EMPTY(7) {
10052 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
10053 IEM_MC_PUSH_FPU_RESULT(FpuRes, pVCpu->iem.s.uFpuOpcode);
10054 } IEM_MC_ELSE() {
10055 IEM_MC_FPU_STACK_PUSH_OVERFLOW(pVCpu->iem.s.uFpuOpcode);
10056 } IEM_MC_ENDIF();
10057 IEM_MC_ADVANCE_RIP_AND_FINISH();
10058
10059 IEM_MC_END();
10060}
10061
10062
/** Opcode 0xd9 0xe8.
 * FLD1: push the constant +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
10069
10070
/** Opcode 0xd9 0xe9.
 * FLDL2T: push the constant log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
10077
10078
/** Opcode 0xd9 0xea.
 * FLDL2E: push the constant log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
10085
/** Opcode 0xd9 0xeb.
 * FLDPI: push the constant pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
10092
10093
/** Opcode 0xd9 0xec.
 * FLDLG2: push the constant log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
10100
/** Opcode 0xd9 0xed.
 * FLDLN2: push the constant ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
10107
10108
/** Opcode 0xd9 0xee.
 * FLDZ: push the constant +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
10115
10116
/** Opcode 0xd9 0xf0.
 * F2XM1: ST(0) = 2^ST(0) - 1.
 *
 * The f2xm1 instruction works on values +1.0 thru -1.0, currently (the range on
 * 287 & 8087 was +0.5 thru 0.0 according to docs). In addition it does appear
 * to produce proper results for +Inf and -Inf.
 *
 * This is probably useful in the implementation of pow() and similar.
 */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
10130
10131
10132/**
10133 * Common worker for FPU instructions working on STn and ST0, storing the result
10134 * in STn, and popping the stack unless IE, DE or ZE was raised.
10135 *
10136 * @param bRm Mod R/M byte.
10137 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10138 */
10139FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
10140{
10141 IEM_MC_BEGIN(3, 1, 0, 0);
10142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10143 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10144 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10145 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10146 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10147
10148 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10149 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10150
10151 IEM_MC_PREPARE_FPU_USAGE();
10152 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
10153 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
10154 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10155 } IEM_MC_ELSE() {
10156 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
10157 } IEM_MC_ENDIF();
10158 IEM_MC_ADVANCE_RIP_AND_FINISH();
10159
10160 IEM_MC_END();
10161}
10162
10163
/** Opcode 0xd9 0xf1.
 * FYL2X: ST(1) = ST(1) * log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
10170
10171
10172/**
10173 * Common worker for FPU instructions working on ST0 and having two outputs, one
10174 * replacing ST0 and one pushed onto the stack.
10175 *
10176 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10177 */
10178FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
10179{
10180 IEM_MC_BEGIN(2, 1, 0, 0);
10181 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10182 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
10183 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
10184 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
10185
10186 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10187 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10188 IEM_MC_PREPARE_FPU_USAGE();
10189 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
10190 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
10191 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo, pVCpu->iem.s.uFpuOpcode);
10192 } IEM_MC_ELSE() {
10193 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO(pVCpu->iem.s.uFpuOpcode);
10194 } IEM_MC_ENDIF();
10195 IEM_MC_ADVANCE_RIP_AND_FINISH();
10196
10197 IEM_MC_END();
10198}
10199
10200
/** Opcode 0xd9 0xf2.
 * FPTAN: replace ST(0) with its partial tangent and push a second result. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
10207
10208
/** Opcode 0xd9 0xf3.
 * FPATAN: ST(1) = partial arctangent of ST(1)/ST(0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
10215
10216
/** Opcode 0xd9 0xf4.
 * FXTRACT: split ST(0) into exponent and significand (two results). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
10223
10224
/** Opcode 0xd9 0xf5.
 * FPREM1: IEEE partial remainder of ST(0)/ST(1), stored in ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
10231
10232
/** Opcode 0xd9 0xf6.
 * FDECSTP: decrement the FPU stack TOP pointer; register contents and tags
 * are not touched. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10253
10254
/** Opcode 0xd9 0xf7.
 * FINCSTP: increment the FPU stack TOP pointer; register contents and tags
 * are not touched. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     * FINCSTP and FDECSTP. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0, pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
10275
10276
/** Opcode 0xd9 0xf8.
 * FPREM: partial remainder (truncating) of ST(0)/ST(1), stored in ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
10283
10284
/** Opcode 0xd9 0xf9.
 * FYL2XP1: ST(1) = ST(1) * log2(ST(0) + 1.0), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
10291
10292
/** Opcode 0xd9 0xfa.
 * FSQRT: ST(0) = sqrt(ST(0)). */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
10299
10300
/** Opcode 0xd9 0xfb.
 * FSINCOS: replace ST(0) with sin and push cos (two results). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
10307
10308
/** Opcode 0xd9 0xfc.
 * FRNDINT: round ST(0) to an integer per the FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
10315
10316
/** Opcode 0xd9 0xfd.
 * FSCALE: scale ST(0) by 2^trunc(ST(1)). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
10323
10324
/** Opcode 0xd9 0xfe.
 * FSIN: ST(0) = sin(ST(0)). */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
10331
10332
/** Opcode 0xd9 0xff.
 * FCOS: ST(0) = cos(ST(0)). */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
10339
10340
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 register-mode ModR/M bytes 0xe0 thru 0xff
 * (reg fields 4-7); indexed by bRm - 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
10377
10378
10379/**
10380 * @opcode 0xd9
10381 */
10382FNIEMOP_DEF(iemOp_EscF1)
10383{
10384 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10385 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
10386
10387 if (IEM_IS_MODRM_REG_MODE(bRm))
10388 {
10389 switch (IEM_GET_MODRM_REG_8(bRm))
10390 {
10391 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
10392 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
10393 case 2:
10394 if (bRm == 0xd0)
10395 return FNIEMOP_CALL(iemOp_fnop);
10396 IEMOP_RAISE_INVALID_OPCODE_RET();
10397 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
10398 case 4:
10399 case 5:
10400 case 6:
10401 case 7:
10402 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
10403 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
10404 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10405 }
10406 }
10407 else
10408 {
10409 switch (IEM_GET_MODRM_REG_8(bRm))
10410 {
10411 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
10412 case 1: IEMOP_RAISE_INVALID_OPCODE_RET();
10413 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
10414 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
10415 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
10416 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
10417 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
10418 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
10419 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10420 }
10421 }
10422}
10423
10424
/** Opcode 0xda 11/0.
 * FCMOVB: copy ST(i) to ST(0) if CF is set (below). */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(0) and ST(i) must be valid; only ST(i) needs dereferencing. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10449
10450
/** Opcode 0xda 11/1.
 * FCMOVE: copy ST(i) to ST(0) if ZF is set (equal). */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10475
10476
/** Opcode 0xda 11/2.
 * FCMOVBE: copy ST(i) to ST(0) if CF or ZF is set (below or equal). */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10501
10502
/** Opcode 0xda 11/3.
 * FCMOVU: copy ST(i) to ST(0) if PF is set (unordered result). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10527
10528
10529/**
10530 * Common worker for FPU instructions working on ST0 and ST1, only affecting
10531 * flags, and popping twice when done.
10532 *
10533 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10534 */
10535FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
10536{
10537 IEM_MC_BEGIN(3, 1, 0, 0);
10538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10539 IEM_MC_LOCAL(uint16_t, u16Fsw);
10540 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10541 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10542 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
10543
10544 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10545 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10546
10547 IEM_MC_PREPARE_FPU_USAGE();
10548 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1) {
10549 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
10550 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw, pVCpu->iem.s.uFpuOpcode);
10551 } IEM_MC_ELSE() {
10552 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP(pVCpu->iem.s.uFpuOpcode);
10553 } IEM_MC_ENDIF();
10554 IEM_MC_ADVANCE_RIP_AND_FINISH();
10555
10556 IEM_MC_END();
10557}
10558
10559
/** Opcode 0xda 0xe9.  FUCOMPP - unordered compare ST0 with ST1, set FSW
 *  condition codes, then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp, "fucompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fucom_r80_by_r80);
}
10566
10567
10568/**
10569 * Common worker for FPU instructions working on ST0 and an m32i, and storing
10570 * the result in ST0.
10571 *
10572 * @param bRm Mod R/M byte.
10573 * @param pfnAImpl Pointer to the instruction implementation (assembly).
10574 */
10575FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
10576{
10577 IEM_MC_BEGIN(3, 3, 0, 0);
10578 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10579 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10580 IEM_MC_LOCAL(int32_t, i32Val2);
10581 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10582 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
10583 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
10584
10585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10587
10588 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10589 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10590 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10591
10592 IEM_MC_PREPARE_FPU_USAGE();
10593 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
10594 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
10595 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
10596 } IEM_MC_ELSE() {
10597 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
10598 } IEM_MC_ENDIF();
10599 IEM_MC_ADVANCE_RIP_AND_FINISH();
10600
10601 IEM_MC_END();
10602}
10603
10604
/** Opcode 0xda !11/0.  FIADD m32i - add the 32-bit integer operand to ST0,
 *  result in ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
10611
10612
/** Opcode 0xda !11/1.  FIMUL m32i - multiply ST0 by the 32-bit integer
 *  operand, result in ST0. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
10619
10620
/** Opcode 0xda !11/2.  FICOM m32i - compare ST0 with the 32-bit integer
 *  operand, updating FSW only (no pop). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10652
10653
/** Opcode 0xda !11/3.  FICOMP m32i - compare ST0 with the 32-bit integer
 *  operand, updating FSW, then pop ST0. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10685
10686
/** Opcode 0xda !11/4.  FISUB m32i - subtract the 32-bit integer operand from
 *  ST0, result in ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
10693
10694
/** Opcode 0xda !11/5.  FISUBR m32i - reverse subtract: ST0 becomes
 *  operand minus ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
10701
10702
/** Opcode 0xda !11/6.  FIDIV m32i - divide ST0 by the 32-bit integer
 *  operand, result in ST0. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
10709
10710
/** Opcode 0xda !11/7.  FIDIVR m32i - reverse divide: ST0 becomes
 *  operand divided by ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
10717
10718
10719/**
10720 * @opcode 0xda
10721 */
10722FNIEMOP_DEF(iemOp_EscF2)
10723{
10724 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10725 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
10726 if (IEM_IS_MODRM_REG_MODE(bRm))
10727 {
10728 switch (IEM_GET_MODRM_REG_8(bRm))
10729 {
10730 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
10731 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
10732 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
10733 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
10734 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
10735 case 5:
10736 if (bRm == 0xe9)
10737 return FNIEMOP_CALL(iemOp_fucompp);
10738 IEMOP_RAISE_INVALID_OPCODE_RET();
10739 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
10740 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
10741 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10742 }
10743 }
10744 else
10745 {
10746 switch (IEM_GET_MODRM_REG_8(bRm))
10747 {
10748 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
10749 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
10750 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
10751 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
10752 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
10753 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
10754 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
10755 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
10756 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10757 }
10758 }
10759}
10760
10761
/** Opcode 0xdb !11/0.  FILD m32i - load the 32-bit integer operand and push
 *  it as an 80-bit real; pushes overflow handling if ST7 is not free. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push requires ST7 (the register that becomes the new top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i32, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10792
10793
/** Opcode 0xdb !11/1.  FISTTP m32i - store ST0 as a 32-bit integer using
 *  truncation (iemAImpl_fistt_r80_to_i32) and pop.  The destination is
 *  mapped for writing up front so access faults are taken before any FPU
 *  state changes. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if FCW.IM is set, store the integer indefinite value; otherwise roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10831
10832
/** Opcode 0xdb !11/2.  FIST m32i - store ST0 as a 32-bit integer
 *  (iemAImpl_fist_r80_to_i32) without popping. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if FCW.IM is set, store the integer indefinite value; otherwise roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10870
10871
/** Opcode 0xdb !11/3.  FISTP m32i - store ST0 as a 32-bit integer
 *  (iemAImpl_fist_r80_to_i32) and pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_MEM_MAP_I32_WO(pi32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if FCW.IM is set, store the integer indefinite value; otherwise roll back. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10909
10910
/** Opcode 0xdb !11/5.  FLD m80r - load an 80-bit real from memory and push
 *  it onto the FPU stack; push-overflow handling if ST7 is not free. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push requires ST7 (the register that becomes the new top) to be empty. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10941
10942
/** Opcode 0xdb !11/7.  FSTP m80r - store ST0 to memory as an 80-bit real
 *  and pop; on empty ST0 writes negative QNaN if FCW.IM is set. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_MEM_MAP_R80_WO(pr80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Empty ST0: if FCW.IM is set, store negative QNaN; otherwise roll back the mapping. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
10980
10981
/** Opcode 0xdb 11/0.  FCMOVNB - copy ST(i) to ST(0) when CF is clear
 *  (not below); underflows if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: CF=0 (NB). */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11006
11007
/** Opcode 0xdb 11/1.  FCMOVNE - copy ST(i) to ST(0) when ZF is clear
 *  (not equal); underflows if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: ZF=0 (NE). */
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11032
11033
/** Opcode 0xdb 11/2.  FCMOVNBE - copy ST(i) to ST(0) when both CF and ZF
 *  are clear (not below-or-equal); underflows if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
        /* Condition: CF=0 and ZF=0 (NBE). */
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        } IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11058
11059
11060/** Opcode 0xdb 11/3. */
11061FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
11062{
11063 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
11064 IEM_MC_BEGIN(0, 1, 0, 0);
11065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11066 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
11067
11068 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11069 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11070
11071 IEM_MC_PREPARE_FPU_USAGE();
11072 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, IEM_GET_MODRM_RM_8(bRm), 0) {
11073 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF) {
11074 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
11075 } IEM_MC_ENDIF();
11076 IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);
11077 } IEM_MC_ELSE() {
11078 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11079 } IEM_MC_ENDIF();
11080 IEM_MC_ADVANCE_RIP_AND_FINISH();
11081
11082 IEM_MC_END();
11083}
11084
11085
/** Opcode 0xdb 0xe0.  FNENI - 8087 interrupt-enable relic; implemented as
 *  a no-op (only raises \#NM if the FPU is unavailable). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11096
11097
/** Opcode 0xdb 0xe1.  FNDISI - 8087 interrupt-disable relic; implemented as
 *  a no-op (only raises \#NM if the FPU is unavailable). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11108
11109
/** Opcode 0xdb 0xe2.  FNCLEX - clear the FSW exception bits without
 *  checking for pending FPU exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11122
11123
/** Opcode 0xdb 0xe3.  FNINIT - initialize the FPU; deferred to the C
 *  implementation (iemCImpl_finit) with exception checking disabled. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_FPU, 0, iemCImpl_finit, false /*fCheckXcpts*/);
}
11131
11132
/** Opcode 0xdb 0xe4.  FNSETPM - 80287 "set protected mode" relic;
 *  implemented as a no-op (only raises \#NM if the FPU is unavailable). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11143
11144
/** Opcode 0xdb 0xe5.  FRSTPM - 80287XL "reset protected mode" relic; raises
 *  \#UD here since newer CPUs do so (the no-op variant is kept disabled). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    IEMOP_RAISE_INVALID_OPCODE_RET();
#endif
}
11160
11161
/** Opcode 0xdb 11/5.  FUCOMI - unordered compare ST0 with ST(i), setting
 *  EFLAGS; deferred to iemCImpl_fcomi_fucomi with fUCmp=true and no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), true /*fUCmp*/,
                                0 /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
11170
11171
11172/** Opcode 0xdb 11/6. */
11173FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
11174{
11175 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
11176 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
11177 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
11178 false /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
11179}
11180
11181
11182/**
11183 * @opcode 0xdb
11184 */
11185FNIEMOP_DEF(iemOp_EscF3)
11186{
11187 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11188 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
11189 if (IEM_IS_MODRM_REG_MODE(bRm))
11190 {
11191 switch (IEM_GET_MODRM_REG_8(bRm))
11192 {
11193 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
11194 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
11195 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
11196 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
11197 case 4:
11198 switch (bRm)
11199 {
11200 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
11201 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
11202 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
11203 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
11204 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
11205 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
11206 case 0xe6: IEMOP_RAISE_INVALID_OPCODE_RET();
11207 case 0xe7: IEMOP_RAISE_INVALID_OPCODE_RET();
11208 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11209 }
11210 break;
11211 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
11212 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
11213 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11214 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11215 }
11216 }
11217 else
11218 {
11219 switch (IEM_GET_MODRM_REG_8(bRm))
11220 {
11221 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
11222 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
11223 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
11224 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
11225 case 4: IEMOP_RAISE_INVALID_OPCODE_RET();
11226 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
11227 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11228 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
11229 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11230 }
11231 }
11232}
11233
11234
11235/**
11236 * Common worker for FPU instructions working on STn and ST0, and storing the
11237 * result in STn unless IE, DE or ZE was raised.
11238 *
11239 * @param bRm Mod R/M byte.
11240 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11241 */
11242FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
11243{
11244 IEM_MC_BEGIN(3, 1, 0, 0);
11245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11246 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11247 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11248 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11249 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
11250
11251 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11252 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11253
11254 IEM_MC_PREPARE_FPU_USAGE();
11255 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, IEM_GET_MODRM_RM_8(bRm), pr80Value2, 0) {
11256 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
11257 IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11258 } IEM_MC_ELSE() {
11259 IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
11260 } IEM_MC_ENDIF();
11261 IEM_MC_ADVANCE_RIP_AND_FINISH();
11262
11263 IEM_MC_END();
11264}
11265
11266
/** Opcode 0xdc 11/0.  FADD ST(i),ST(0) - add ST0 to ST(i), result in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
11273
11274
/** Opcode 0xdc 11/1.  FMUL ST(i),ST(0) - multiply ST(i) by ST0, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
11281
11282
/** Opcode 0xdc 11/4.  FSUBR ST(i),ST(0) - reverse subtract, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
11289
11290
/** Opcode 0xdc 11/5.  FSUB ST(i),ST(0) - subtract, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
11297
11298
/** Opcode 0xdc 11/6.  FDIVR ST(i),ST(0) - reverse divide, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
11305
11306
/** Opcode 0xdc 11/7.  FDIV ST(i),ST(0) - divide, result in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
11313
11314
11315/**
11316 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
11317 * memory operand, and storing the result in ST0.
11318 *
11319 * @param bRm Mod R/M byte.
11320 * @param pfnImpl Pointer to the instruction implementation (assembly).
11321 */
11322FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
11323{
11324 IEM_MC_BEGIN(3, 3, 0, 0);
11325 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11326 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11327 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
11328 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11329 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
11330 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
11331
11332 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11334 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11335 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11336
11337 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11338 IEM_MC_PREPARE_FPU_USAGE();
11339 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0) {
11340 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
11341 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11342 } IEM_MC_ELSE() {
11343 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
11344 } IEM_MC_ENDIF();
11345 IEM_MC_ADVANCE_RIP_AND_FINISH();
11346
11347 IEM_MC_END();
11348}
11349
11350
/** Opcode 0xdc !11/0.  FADD m64r - add the 64-bit real operand to ST0,
 *  result in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
11357
11358
/** Opcode 0xdc !11/1.  FMUL m64r - multiply ST0 by the 64-bit real operand,
 *  result in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
11365
11366
/** Opcode 0xdc !11/2.  FCOM m64r - compare ST0 with the 64-bit real operand,
 *  updating FSW only (no pop). */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11398
11399
/** Opcode 0xdc !11/3.  FCOMP m64r - compare ST0 with the 64-bit real
 *  operand, updating FSW, then pop ST0. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11431
11432
/** Opcode 0xdc !11/4.
 * FSUB ST0,m64r - ST(0) = ST(0) - m64r, via the common ST0-by-m64r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
11439
11440
/** Opcode 0xdc !11/5.
 * FSUBR ST0,m64r - reversed subtraction (m64r - ST(0)), via the common
 * ST0-by-m64r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
11447
11448
/** Opcode 0xdc !11/6.
 * FDIV ST0,m64r - ST(0) = ST(0) / m64r, via the common ST0-by-m64r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
11455
11456
/** Opcode 0xdc !11/7.
 * FDIVR ST0,m64r - reversed division (m64r / ST(0)), via the common
 * ST0-by-m64r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
11463
11464
11465/**
11466 * @opcode 0xdc
11467 */
11468FNIEMOP_DEF(iemOp_EscF4)
11469{
11470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11471 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
11472 if (IEM_IS_MODRM_REG_MODE(bRm))
11473 {
11474 switch (IEM_GET_MODRM_REG_8(bRm))
11475 {
11476 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
11477 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
11478 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
11479 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
11480 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
11481 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
11482 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
11483 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
11484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11485 }
11486 }
11487 else
11488 {
11489 switch (IEM_GET_MODRM_REG_8(bRm))
11490 {
11491 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
11492 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
11493 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
11494 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
11495 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
11496 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
11497 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
11498 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
11499 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11500 }
11501 }
11502}
11503
11504
/** Opcode 0xdd !11/0.
 * FLD m64r - load a 64-bit real memory operand, convert to 80-bit and push it
 * onto the FPU stack.  The push only happens when ST(7) is free; otherwise a
 * stack push overflow is recorded.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register the push will land in (TOP - 1). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r64, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11535
11536
/** Opcode 0xdd !11/1.  (The old comment said !11/0; the iemOp_EscF5 dispatch
 * below routes mem-mode /1 here.)
 * FISTTP m64i - store ST(0) to a 64-bit integer with truncation, then pop.
 * If ST(0) is empty and the invalid-operation exception is masked (FCW.IM),
 * the integer indefinite value (INT64_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination for writing before we know whether the store will
       succeed, so #PF and friends are raised before any FPU state changes. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: store indefinite if IM is masked, otherwise discard. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11574
11575
/** Opcode 0xdd !11/2.  (The old comment said !11/0; iemOp_EscF5 routes
 * mem-mode /2 here.)
 * FST m64r - store ST(0) to a 64-bit real memory operand without popping.
 * On stack underflow with FCW.IM masked, a negative QNaN is stored. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11613
11614
11615
11616
/** Opcode 0xdd !11/3.  (The old comment said !11/0; iemOp_EscF5 routes
 * mem-mode /3 here.)
 * FSTP m64r - same as FST m64r but pops the register stack afterwards
 * (note the _THEN_POP variants of the FSW updaters). */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_MEM_MAP_R64_WO(pr64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11654
11655
/** Opcode 0xdd !11/4.  (The old comment said !11/0; iemOp_EscF5 routes
 * mem-mode /4 here.)
 * FRSTOR m94/108byte - restore the full FPU state from memory; the heavy
 * lifting is deferred to iemCImpl_frstor. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,       1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
}
11673
11674
/** Opcode 0xdd !11/6.  (The old comment said !11/0; iemOp_EscF5 routes
 * mem-mode /6 here.)
 * FNSAVE m94/108byte - save the full FPU state to memory (no pending-exception
 * check variant); deferred to iemCImpl_fnsave. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0, 0, 0);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); /* Note! Implicit fninit after the save, do not use FOR_READ here! */

    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize,  0);
    IEM_MC_ARG_CONST(uint8_t, iEffSeg,      /*=*/ pVCpu->iem.s.iEffSeg,       1);
    IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_FPU, 0, iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
}
11692
/** Opcode 0xdd !11/7.  (The old comment said !11/0; iemOp_EscF5 routes
 * mem-mode /7 here.)
 * FNSTSW m16 - store the FPU status word to a 16-bit memory operand. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2, 0, 0);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
}
11716
11717
/** Opcode 0xdd 11/0.
 * FFREE ST(i) - mark the given FPU register as empty in the tag word without
 * moving the stack top. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11737
11738
/** Opcode 0xdd 11/2.  (The old comment said 11/1; iemOp_EscF5 routes
 * reg-mode /2 here.)
 * FST ST(i) - copy ST(0) into ST(i); records stack underflow when ST(0) is
 * empty. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEM_MC_BEGIN(0, 2, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW(IEM_GET_MODRM_RM_8(bRm), pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
11761
11762
/** Opcode 0xdd 11/4.  (The old comment said 11/3; iemOp_EscF5 routes
 * reg-mode /4 here.)
 * FUCOM ST0,ST(i) - unordered compare, no pop; uses the common no-store
 * st0/stN worker. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
11769
11770
/** Opcode 0xdd 11/5.  (The old comment said 11/4; iemOp_EscF5 routes
 * reg-mode /5 here.)
 * FUCOMP ST0,ST(i) - unordered compare and pop; uses the common no-store
 * st0/stN pop worker. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
11777
11778
11779/**
11780 * @opcode 0xdd
11781 */
11782FNIEMOP_DEF(iemOp_EscF5)
11783{
11784 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11785 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
11786 if (IEM_IS_MODRM_REG_MODE(bRm))
11787 {
11788 switch (IEM_GET_MODRM_REG_8(bRm))
11789 {
11790 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
11791 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
11792 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
11793 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
11794 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
11795 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
11796 case 6: IEMOP_RAISE_INVALID_OPCODE_RET();
11797 case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
11798 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11799 }
11800 }
11801 else
11802 {
11803 switch (IEM_GET_MODRM_REG_8(bRm))
11804 {
11805 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
11806 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
11807 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
11808 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
11809 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
11810 case 5: IEMOP_RAISE_INVALID_OPCODE_RET();
11811 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
11812 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
11813 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11814 }
11815 }
11816}
11817
11818
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST0 - add ST(0) to ST(i), then pop; via the common
 * stN-by-st0-and-pop worker. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
11825
11826
/** Opcode 0xde 11/1.  (The old comment said 11/0; iemOp_EscF6 routes
 * reg-mode /1 here.)
 * FMULP ST(i),ST0 - multiply ST(i) by ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
11833
11834
/** Opcode 0xde 0xd9.
 * FCOMPP - compare ST(0) with ST(1) and pop both; via the no-store
 * st0/st1 double-pop worker. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp, "fcompp");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_st1_pop_pop, iemAImpl_fcom_r80_by_r80);
}
11841
11842
/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST0 - reversed subtract into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
11849
11850
/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST0 - subtract ST(0) from ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
11857
11858
/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST0 - reversed divide into ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
11865
11866
/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST0 - divide ST(i) by ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
11873
11874
11875/**
11876 * Common worker for FPU instructions working on ST0 and an m16i, and storing
11877 * the result in ST0.
11878 *
11879 * @param bRm Mod R/M byte.
11880 * @param pfnAImpl Pointer to the instruction implementation (assembly).
11881 */
11882FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
11883{
11884 IEM_MC_BEGIN(3, 3, 0, 0);
11885 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11886 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
11887 IEM_MC_LOCAL(int16_t, i16Val2);
11888 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
11889 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
11890 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
11891
11892 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11893 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11894
11895 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
11896 IEM_MC_MAYBE_RAISE_FPU_XCPT();
11897 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11898
11899 IEM_MC_PREPARE_FPU_USAGE();
11900 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
11901 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
11902 IEM_MC_STORE_FPU_RESULT(FpuRes, 0, pVCpu->iem.s.uFpuOpcode);
11903 } IEM_MC_ELSE() {
11904 IEM_MC_FPU_STACK_UNDERFLOW(0, pVCpu->iem.s.uFpuOpcode);
11905 } IEM_MC_ENDIF();
11906 IEM_MC_ADVANCE_RIP_AND_FINISH();
11907
11908 IEM_MC_END();
11909}
11910
11911
/** Opcode 0xde !11/0.
 * FIADD m16i - add a 16-bit integer memory operand to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
11918
11919
/** Opcode 0xde !11/1.
 * FIMUL m16i - multiply ST(0) by a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
11926
11927
/** Opcode 0xde !11/2.
 * FICOM ST0,m16i - compare ST(0) with a 16-bit integer memory operand; only
 * the FPU status word is updated, nothing is popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11959
11960
/** Opcode 0xde !11/3.
 * FICOMP ST0,m16i - same as FICOM ST0,m16i but pops the register stack
 * afterwards (note the _THEN_POP status-word updaters). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0) {
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
11992
11993
/** Opcode 0xde !11/4.
 * FISUB m16i - subtract a 16-bit integer memory operand from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
12000
12001
/** Opcode 0xde !11/5.
 * FISUBR m16i - reversed subtraction (m16i - ST(0)) into ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
12008
12009
/** Opcode 0xde !11/6.
 * FIDIV m16i - divide ST(0) by a 16-bit integer memory operand. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
12016
12017
/** Opcode 0xde !11/7.
 * FIDIVR m16i - reversed division (m16i / ST(0)) into ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
12024
12025
12026/**
12027 * @opcode 0xde
12028 */
12029FNIEMOP_DEF(iemOp_EscF6)
12030{
12031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
12032 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
12033 if (IEM_IS_MODRM_REG_MODE(bRm))
12034 {
12035 switch (IEM_GET_MODRM_REG_8(bRm))
12036 {
12037 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
12038 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
12039 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
12040 case 3: if (bRm == 0xd9)
12041 return FNIEMOP_CALL(iemOp_fcompp);
12042 IEMOP_RAISE_INVALID_OPCODE_RET();
12043 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
12044 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
12045 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
12046 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
12047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12048 }
12049 }
12050 else
12051 {
12052 switch (IEM_GET_MODRM_REG_8(bRm))
12053 {
12054 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
12055 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
12056 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
12057 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
12058 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
12059 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
12060 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
12061 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
12062 IEM_NOT_REACHED_DEFAULT_CASE_RET();
12063 }
12064 }
12065}
12066
12067
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: frees the
 * given register's tag-word slot and then increments the stack top. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(IEM_GET_MODRM_RM_8(bRm));
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP(pVCpu->iem.s.uFpuOpcode);

    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12087
12088
/** Opcode 0xdf 0xe0.
 * FNSTSW AX - store the FPU status word in the AX register. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEM_MC_BEGIN(0, 1, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12103
12104
12105/** Opcode 0xdf 11/5. */
12106FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
12107{
12108 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
12109 IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
12110 iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
12111 RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
12112}
12113
12114
/** Opcode 0xdf 11/6.
 * FCOMIP ST0,ST(i) - ordered compare ST(0) with ST(i), set EFLAGS, pop.
 * Deferred to iemCImpl_fcomi_fucomi with fUCmp=false (ordered); bit 31 of the
 * last argument requests the pop after the compare. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_FPU | IEM_CIMPL_F_STATUS_FLAGS, 0,
                                iemCImpl_fcomi_fucomi, IEM_GET_MODRM_RM_8(bRm), false /*fUCmp*/,
                                RT_BIT_32(31) /*fPop*/ | pVCpu->iem.s.uFpuOpcode);
}
12123
12124
/** Opcode 0xdf !11/0.
 * FILD m16i - load a 16-bit integer memory operand, convert to 80-bit real
 * and push it.  The push requires ST(7) to be free; otherwise a stack push
 * overflow is recorded. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i16, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12155
12156
/** Opcode 0xdf !11/1.
 * FISTTP m16i - store ST(0) to a 16-bit integer with truncation, then pop.
 * On stack underflow with FCW.IM masked, the 16-bit integer indefinite value
 * (INT16_MIN) is stored instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination before doing any FPU work so memory faults are
       raised first. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12194
12195
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST(0) to a 16-bit integer (rounding per FCW.RC) without
 * popping.  On stack underflow with FCW.IM masked, the 16-bit integer
 * indefinite value (INT16_MIN) is stored. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12233
12234
/** Opcode 0xdf !11/3.
 * FISTP m16i - same as FIST m16i but pops the register stack afterwards
 * (note the _THEN_POP variants of the FSW updaters). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_MEM_MAP_I16_WO(pi16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12272
12273
/** Opcode 0xdf !11/4.
 * FBLD m80d - load an 80-bit packed BCD memory operand, convert it to 80-bit
 * real and push it.  The push requires ST(7) to be free; otherwise a stack
 * push overflow is recorded. */
FNIEMOP_DEF_1(iemOp_fbld_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbld_m80d, "fbld m80d");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTPBCD80U, d80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTPBCD80U, pd80Val, d80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_D80(d80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_d80, pFpuRes, pd80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12304
12305
/** Opcode 0xdf !11/5.
 * FILD m64i - load a signed 64-bit integer from memory, convert it to an
 * 80-bit real and push it onto the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* Only push if ST(7) is free, i.e. the stack won't overflow. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) {
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_r80_from_i64, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12336
12337
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) as an 80-bit packed BCD value at the effective
 * address and pop the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the 80-bit BCD destination writable up front, before inspecting ST(0). */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: if FCW.IM is set, store the BCD indefinite value;
           otherwise discard the mapping and raise the exception. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12375
12376
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) as a signed 64-bit integer at the effective
 * address and pop the FPU register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 3, 0, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);

    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();

    /* Map the destination qword writable up front, before inspecting ST(0). */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_MEM_MAP_I64_WO(pi64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
        IEM_MC_LOCAL(uint16_t, u16Fsw);
        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw, u16Fsw, 0);
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(bUnmapInfo, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ELSE() {
        /* Stack underflow: if FCW.IM is set, store the integer indefinite
           value; otherwise discard the mapping and raise the exception. */
        IEM_MC_IF_FCW_IM() {
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ELSE() {
            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(bUnmapInfo);
        } IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP_AND_FINISH();

    IEM_MC_END();
}
12414
12415
/**
 * @opcode 0xdf
 *
 * FPU escape 0xdf dispatcher: fetches the ModR/M byte, records the FPU
 * opcode word, and dispatches on register vs. memory form and the /reg
 * field.
 */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* FNSTSW AX is the only valid encoding in this group. */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    IEMOP_RAISE_INVALID_OPCODE_RET();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: IEMOP_RAISE_INVALID_OPCODE_RET();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch (IEM_GET_MODRM_REG_8(bRm))
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
12456
12457
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrement the counter register and take the short
 * branch while the counter is non-zero and ZF is clear.  The counter
 * width (CX/ECX/RCX) follows the effective address size.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12508
12509
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrement the counter register and take the short
 * branch while the counter is non-zero and ZF is set.  The counter
 * width (CX/ECX/RCX) follows the effective address size.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12560
12561
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrement the counter register and take the short branch while
 * the counter is non-zero.  The counter width (CX/ECX/RCX) follows the
 * effective address size.  A special logging-only shortcut detects the
 * self-branching 'LOOP $-2' stall idiom and zeroes the counter instead.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the \#GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */

    /* NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
     * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs.  Shortcutting
     * the loop causes guest crashes, but when logging it's nice to skip a few million
     * lines of useless output. */
#if defined(LOG_ENABLED)
    if ((LogIs3Enabled() || LogIs4Enabled()) && -(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm)
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
#endif

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12652
12653
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: take the short branch when the counter register is
 * zero; the counter is only tested, never decremented.  The counter width
 * follows the effective address size.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_NOT_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_64BIT, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP_AND_FINISH();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12701
12702
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the immediate I/O port into AL; deferred to
 * the C implementation since port I/O may cause VM exits. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12712
12713
/** Opcode 0xe5.
 * IN eAX,Ib - read a word or dword (per effective operand size) from the
 * immediate I/O port into AX/EAX; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12724
12725
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate I/O port; deferred to the C
 * implementation since port I/O may cause VM exits. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, 1, 0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12735
12736
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX (per effective operand size) to the immediate
 * I/O port; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                0x80 /* fImm */ | pVCpu->iem.s.enmEffAddrMode);
}
12747
12748
/**
 * @opcode 0xe8
 *
 * CALL Jv - near relative call.  The immediate width follows the effective
 * operand size; in 64-bit mode a 32-bit immediate is sign-extended to 64
 * bits.  Deferred to the C implementations which push the return address.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_RELATIVE | IEM_CIMPL_F_BRANCH_STACK, 0,
                                        iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12782
12783
/**
 * @opcode 0xe9
 *
 * JMP Jv - near relative jump.  16-bit operand size uses a 16-bit
 * displacement; 32-bit and 64-bit modes share the 32-bit displacement
 * path (sign-extended by the jump helper).
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 0, 0, 0);
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S16_AND_FINISH(i16Imm);
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 0, IEM_MC_F_MIN_386, 0);
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_REL_JMP_S32_AND_FINISH(i32Imm);
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
12813
12814
/**
 * @opcode 0xea
 *
 * JMP Ap - direct far jump with an immediate selector:offset pointer.
 * Invalid in 64-bit mode; deferred to iemCImpl_FarJmp which handles
 * mode/privilege transitions.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t off32Seg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&off32Seg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&off32Seg);
    uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, UINT64_MAX,
                                iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
    /** @todo make task-switches, ring-switches, ++ return non-zero status */
}
12836
12837
/**
 * @opcode 0xeb
 *
 * JMP Jb - short relative jump with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    IEM_MC_END();
}
12852
12853
/** Opcode 0xec.
 * IN AL,DX - read one byte from the I/O port in DX into AL; deferred to the
 * C implementation since port I/O may cause VM exits. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12863
12864
/** Opcode 0xed.
 * IN eAX,DX - read a word or dword (per effective operand size) from the
 * I/O port in DX into AX/EAX; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO,
                                RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xAX),
                                iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12875
12876
/** Opcode 0xee.
 * OUT DX,AL - write AL to the I/O port in DX; deferred to the C
 * implementation since port I/O may cause VM exits. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, 1, pVCpu->iem.s.enmEffAddrMode);
}
12885
12886
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (per effective operand size) to the I/O port in
 * DX; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_IO, 0,
                                iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4,
                                pVCpu->iem.s.enmEffAddrMode);
}
12896
12897
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK and continues decoding with the next
 * opcode byte.  Whether LOCK is legal is decided by the prefixed instruction.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12909
12910
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a \#DB-style software interrupt via iemCImpl_int.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    /** @todo Does not generate \#UD on 286, or so they say...  Was allegedly a
     * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
     * LOADALL memo.  Needs some testing. */
    IEMOP_HLP_MIN_386();
    /** @todo testcase! */
    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB, 0,
                                iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
12926
12927
/**
 * @opcode 0xf2
 *
 * REPNE/REPNZ prefix: records IEM_OP_PRF_REPNZ (dropping any earlier REPZ)
 * and continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12945
12946
/**
 * @opcode 0xf3
 *
 * REPE/REPZ prefix: records IEM_OP_PRF_REPZ (dropping any earlier REPNZ)
 * and continues decoding with the next opcode byte.
 */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
12964
12965
/**
 * @opcode 0xf4
 *
 * HLT: deferred to iemCImpl_hlt; ends the current translation block and may
 * cause a VM exit.  Privilege checking is done by the C implementation.
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_MNEMONIC(hlt, "hlt");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_END_TB | IEM_CIMPL_F_VMEXIT, 0, iemCImpl_hlt);
}
12975
12976
/**
 * @opcode 0xf5
 *
 * CMC: complement the carry flag; no other flags are affected.
 */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
12989
12990
/**
 * Body of 'inc/dec/not/neg Eb'.
 *
 * Emits the register variant and both memory variants of a single-operand
 * byte RMW instruction.  The plain worker (a_fnNormalU8) is used for
 * registers and for memory when there is no LOCK prefix or when
 * IEM_F_X86_DISREGARD_LOCK is in effect; otherwise the atomic worker
 * (a_fnLockedU8) is used.
 */
#define IEMOP_BODY_UNARY_Eb(a_bRm, a_fnNormalU8, a_fnLockedU8) \
    if (IEM_IS_MODRM_REG_MODE(a_bRm)) \
    { \
        /* register access */ \
        IEM_MC_BEGIN(2, 0, 0, 0); \
        IEMOP_HLP_DONE_DECODING(); \
        IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
        IEM_MC_ARG(uint32_t *, pEFlags, 1); \
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, a_bRm)); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
        IEM_MC_ADVANCE_RIP_AND_FINISH(); \
        IEM_MC_END(); \
    } \
    else \
    { \
        /* memory access. */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
        else \
        { \
            IEM_MC_BEGIN(2, 2, 0, 0); \
            IEM_MC_ARG(uint8_t *, pu8Dst, 0); \
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
            IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
            \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING(); \
            IEM_MC_MEM_MAP_U8_RW(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
            IEM_MC_FETCH_EFLAGS(EFlags); \
            IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU8, pu8Dst, pEFlags); \
            \
            IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
            IEM_MC_COMMIT_EFLAGS(EFlags); \
            IEM_MC_ADVANCE_RIP_AND_FINISH(); \
            IEM_MC_END(); \
        } \
    } \
    (void)0
13051
13052
/**
 * Body for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Emits the register variants (16/32/64-bit) and the unlocked memory
 * variants.  Note: this macro deliberately ends inside an open 'else'
 * block; it must be followed by IEMOP_BODY_UNARY_Ev_LOCKED, which supplies
 * the locked memory variants and closes the braces.
 */
#define IEMOP_BODY_UNARY_Ev(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64) \
    if (IEM_IS_MODRM_REG_MODE(bRm)) \
    { \
        /* \
         * Register target \
         */ \
        switch (pVCpu->iem.s.enmEffOpSize) \
        { \
            case IEMMODE_16BIT: \
                IEM_MC_BEGIN(2, 0, 0, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_32BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_MIN_386, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            case IEMMODE_64BIT: \
                IEM_MC_BEGIN(2, 0, IEM_MC_F_64BIT, 0); \
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                IEM_MC_ARG(uint32_t *, pEFlags, 1); \
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                IEM_MC_END(); \
                break; \
            \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
    } \
    else \
    { \
        /* \
         * Memory target. \
         */ \
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) || (pVCpu->iem.s.fExec & IEM_F_X86_DISREGARD_LOCK)) \
        { \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnNormalU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
        else \
        { \
            (void)0
13176
/**
 * Locked tail for 'inc/dec/not/neg Ev' (groups 3 and 5).
 *
 * Completes IEMOP_BODY_UNARY_Ev: emits the atomic (LOCK-prefixed) memory
 * variants for all three operand sizes and closes the braces the first
 * macro left open.
 */
#define IEMOP_BODY_UNARY_Ev_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64) \
            switch (pVCpu->iem.s.enmEffOpSize) \
            { \
                case IEMMODE_16BIT: \
                    IEM_MC_BEGIN(2, 3, 0, 0); \
                    IEM_MC_ARG(uint16_t *, pu16Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U16_RW(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU16, pu16Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_32BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_MIN_386, 0); \
                    IEM_MC_ARG(uint32_t *, pu32Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U32_RW(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU32, pu32Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                case IEMMODE_64BIT: \
                    IEM_MC_BEGIN(2, 3, IEM_MC_F_64BIT, 0); \
                    IEM_MC_ARG(uint64_t *, pu64Dst, 0); \
                    IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1); \
                    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); \
                    IEM_MC_LOCAL(uint8_t, bUnmapInfo); \
                    \
                    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); \
                    IEMOP_HLP_DONE_DECODING(); \
                    IEM_MC_MEM_MAP_U64_RW(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst); \
                    IEM_MC_FETCH_EFLAGS(EFlags); \
                    IEM_MC_CALL_VOID_AIMPL_2(a_fnLockedU64, pu64Dst, pEFlags); \
                    \
                    IEM_MC_MEM_COMMIT_AND_UNMAP_RW(bUnmapInfo); \
                    IEM_MC_COMMIT_EFLAGS(EFlags); \
                    IEM_MC_ADVANCE_RIP_AND_FINISH(); \
                    IEM_MC_END(); \
                    break; \
                \
                IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
            } \
        } \
    } \
    (void)0
13242
13243
13244/**
13245 * @opmaps grp3_f6
13246 * @opcode /0
13247 * @todo also /1
13248 */
/**
 * @opmaps grp3_f6
 * @opcode /0
 * @todo also /1
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    /* TEST Eb,Ib: AND dst with immediate, set flags, discard the result. */
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is undefined after TEST. */

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_BEGIN(3, 0, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /* LOCK is invalid with TEST. */
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 3, 0, 0);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = size of trailing imm8. */

        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Mapped read-only: TEST never writes the destination. */
        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP_AND_FINISH();
        IEM_MC_END();
    }
}
13294
13295
13296/** Opcode 0xf6 /4, /5, /6 and /7. */
/**
 * Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the 8-bit MUL/IMUL/DIV/IDIV forms: the operand is
 * combined with AL and the 16-bit result/remainder lands in AX.  A non-zero
 * return code from the assembly helper raises \#DE (divide error).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pfnU8   The 8-bit multiply/divide assembly helper to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        IEM_MC_BEGIN(3, 1, 0, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, IEM_GET_MODRM_RM(pVCpu, bRm));
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX); /* implicit AX accumulator */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR(); /* helper signalled overflow / divide by zero */
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2, 0, 0);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP_AND_FINISH();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
}
13346
13347
13348/** Opcode 0xf7 /4, /5, /6 and /7. */
/**
 * Opcode 0xf7 /4, /5, /6 and /7.
 *
 * Common worker for the 16/32/64-bit MUL/IMUL/DIV/IDIV forms using the
 * implicit xAX/xDX register pair.  The width-specific assembly helper is
 * picked from @a pImpl according to the effective operand size; a non-zero
 * helper return code raises \#DE.  On the 32-bit success path the high
 * dwords of RAX/RDX are cleared explicitly (64-bit mode semantics).
 *
 * @param   bRm     The ModR/M byte.
 * @param   pImpl   Table of per-operand-size multiply/divide helpers.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit writes zero-extend; helpers worked via pointers, so do it here. */
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(4, 2, 0, 0);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xAX);
                    IEM_MC_CLEAR_HIGH_GREG_U64(X86_GREG_xDX);
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(4, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP_AND_FINISH();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13518
13519
13520/**
13521 * @opmaps grp3_f6
13522 * @opcode /2
13523 */
/**
 * @opmaps grp3_f6
 * @opcode /2
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Eb, uint8_t, bRm)
{
    /* NOT Eb - one's complement; locked variant used when LOCK prefix present. */
    IEMOP_MNEMONIC(not_Eb, "not Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_not_u8, iemAImpl_not_u8_locked);
}
13529
13530
13531/**
13532 * @opmaps grp3_f6
13533 * @opcode /3
13534 */
13535FNIEMOP_DEF_1(iemOp_grp3_neg_Eb, uint8_t, bRm)
13536{
13537 IEMOP_MNEMONIC(net_Eb, "neg Eb");
13538 IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_neg_u8, iemAImpl_neg_u8_locked);
13539}
13540
13541
13542/**
13543 * @opcode 0xf6
13544 */
/**
 * @opcode 0xf6
 *
 * Group 3 byte-operand dispatcher: routes on the ModR/M reg field.
 * /0 and /1 are both TEST; /4../7 share the mul/div worker with the
 * appropriate EFLAGS-behavior helper table.
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm); /* /1 aliases TEST */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Eb,  bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Eb,  bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13573
13574
13575/** Opcode 0xf7 /0. */
/** Opcode 0xf7 /0.
 *
 * TEST Ev,Iv - ANDs the destination with an immediate, updates flags and
 * discards the result (memory is therefore mapped read-only).  AF is
 * undefined.  In 64-bit mode the immediate is a sign-extended imm32.
 */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0, 0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended */
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 3, 0, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = size of trailing imm16 */

                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = size of trailing imm32 */

                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* imm is 4 bytes even for 64-bit ops */

                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);

                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP_AND_FINISH();
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13710
13711
13712/** Opcode 0xf7 /2. */
/** Opcode 0xf7 /2.
 *
 * NOT Ev - one's complement.  The non-locked and locked bodies are emitted
 * by the shared unary-Ev macros (the latter handles the LOCK-prefixed
 * memory form).
 */
FNIEMOP_DEF_1(iemOp_grp3_not_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(not_Ev, "not Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_not_u16,        iemAImpl_not_u32,        iemAImpl_not_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_not_u16_locked, iemAImpl_not_u32_locked, iemAImpl_not_u64_locked);
}
13719
13720
13721/** Opcode 0xf7 /3. */
/** Opcode 0xf7 /3.
 *
 * NEG Ev - two's complement.  Shares the unary-Ev macro bodies with NOT;
 * the locked variants serve the LOCK-prefixed memory form.
 */
FNIEMOP_DEF_1(iemOp_grp3_neg_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(neg_Ev, "neg Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_neg_u16,        iemAImpl_neg_u32,        iemAImpl_neg_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_neg_u16_locked, iemAImpl_neg_u32_locked, iemAImpl_neg_u64_locked);
}
13728
13729
13730/**
13731 * @opcode 0xf7
13732 */
/**
 * @opcode 0xf7
 *
 * Group 3 word/dword/qword dispatcher: routes on the ModR/M reg field.
 * /0 and /1 are both TEST; /4../7 share the mul/div worker with the
 * matching per-CPU EFLAGS-behavior implementation table.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm); /* /1 aliases TEST */
        case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
13761
13762
13763/**
13764 * @opcode 0xf8
13765 */
/**
 * @opcode 0xf8
 */
FNIEMOP_DEF(iemOp_clc)
{
    /* CLC - clear the carry flag only; no other flags are touched. */
    IEMOP_MNEMONIC(clc, "clc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13775
13776
13777/**
13778 * @opcode 0xf9
13779 */
/**
 * @opcode 0xf9
 */
FNIEMOP_DEF(iemOp_stc)
{
    /* STC - set the carry flag only; no other flags are touched. */
    IEMOP_MNEMONIC(stc, "stc");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13789
13790
13791/**
13792 * @opcode 0xfa
13793 */
/**
 * @opcode 0xfa
 */
FNIEMOP_DEF(iemOp_cli)
{
    /* CLI is deferred to the C implementation: it modifies RFLAGS, can cause
       a VM-exit, and interrupts must be checked before it takes effect. */
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_CHECK_IRQ_BEFORE, 0, iemCImpl_cli);
}
13800
13801
/**
 * @opcode 0xfb
 */
FNIEMOP_DEF(iemOp_sti)
{
    /* STI is deferred to the C implementation: it modifies RFLAGS, checks for
       pending interrupts afterwards, can VM-exit, and sets up the one
       instruction interrupt-inhibit shadow. */
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_DEFER_TO_CIMPL_0_RET(  IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_CHECK_IRQ_AFTER
                                | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_INHIBIT_SHADOW, 0, iemCImpl_sti);
}
13809
13810
13811/**
13812 * @opcode 0xfc
13813 */
/**
 * @opcode 0xfc
 */
FNIEMOP_DEF(iemOp_cld)
{
    /* CLD - clear the direction flag only; no other flags are touched. */
    IEMOP_MNEMONIC(cld, "cld");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13823
13824
13825/**
13826 * @opcode 0xfd
13827 */
/**
 * @opcode 0xfd
 */
FNIEMOP_DEF(iemOp_std)
{
    /* STD - set the direction flag only; no other flags are touched. */
    IEMOP_MNEMONIC(std, "std");
    IEM_MC_BEGIN(0, 0, 0, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP_AND_FINISH();
    IEM_MC_END();
}
13837
13838
13839/**
13840 * @opmaps grp4
13841 * @opcode /0
13842 */
/**
 * @opmaps grp4
 * @opcode /0
 */
FNIEMOP_DEF_1(iemOp_Grp4_inc_Eb, uint8_t, bRm)
{
    /* INC Eb - shared unary-Eb body; locked variant for LOCK-prefixed memory form. */
    IEMOP_MNEMONIC(inc_Eb, "inc Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_inc_u8, iemAImpl_inc_u8_locked);
}
13848
13849
13850/**
13851 * @opmaps grp4
13852 * @opcode /1
13853 */
/**
 * @opmaps grp4
 * @opcode /1
 */
FNIEMOP_DEF_1(iemOp_Grp4_dec_Eb, uint8_t, bRm)
{
    /* DEC Eb - shared unary-Eb body; locked variant for LOCK-prefixed memory form. */
    IEMOP_MNEMONIC(dec_Eb, "dec Eb");
    IEMOP_BODY_UNARY_Eb(bRm, iemAImpl_dec_u8, iemAImpl_dec_u8_locked);
}
13859
13860
13861/**
13862 * @opcode 0xfe
13863 */
/**
 * @opcode 0xfe
 *
 * Group 4 dispatcher: only /0 (INC Eb) and /1 (DEC Eb) are defined;
 * /2../7 raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch (IEM_GET_MODRM_REG_8(bRm))
    {
        case 0: return FNIEMOP_CALL_1(iemOp_Grp4_inc_Eb, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_Grp4_dec_Eb, bRm);
        default:
            /** @todo is the eff-addr decoded? */
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            IEMOP_RAISE_INVALID_OPCODE_RET();
    }
}
13877
13878/** Opcode 0xff /0. */
/** Opcode 0xff /0.
 *
 * INC Ev - shared unary-Ev bodies; the locked variants serve the
 * LOCK-prefixed memory form.
 */
FNIEMOP_DEF_1(iemOp_Grp5_inc_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(inc_Ev, "inc Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_inc_u16,        iemAImpl_inc_u32,        iemAImpl_inc_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_inc_u16_locked, iemAImpl_inc_u32_locked, iemAImpl_inc_u64_locked);
}
13885
13886
13887/** Opcode 0xff /1. */
/** Opcode 0xff /1.
 *
 * DEC Ev - shared unary-Ev bodies; the locked variants serve the
 * LOCK-prefixed memory form.
 */
FNIEMOP_DEF_1(iemOp_Grp5_dec_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(dec_Ev, "dec Ev");
    IEMOP_BODY_UNARY_Ev(       iemAImpl_dec_u16,        iemAImpl_dec_u32,        iemAImpl_dec_u64);
    IEMOP_BODY_UNARY_Ev_LOCKED(iemAImpl_dec_u16_locked, iemAImpl_dec_u32_locked, iemAImpl_dec_u64_locked);
}
13894
13895
13896/**
13897 * Opcode 0xff /2.
13898 * @param bRm The RM byte.
13899 */
/**
 * Opcode 0xff /2.
 *
 * CALL Ev - near indirect call; the target comes from a register or from
 * memory.  The actual stack push and RIP update are deferred to the C
 * implementation (iemCImpl_call_16/32/64).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1, 0, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_16, u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_MIN_386, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_32, u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1, IEM_MC_F_64BIT, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_STACK, 0, iemCImpl_call_64, u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
13982
/**
 * Body macro shared by CALLF Ep (0xff /3) and JMPF Ep (0xff /5).
 *
 * Loads a far pointer (offset + selector) from memory and defers to the
 * given C implementation.  Register forms are invalid and raise \#UD.
 * In 64-bit mode the default operand size is 32-bit; only Intel honours
 * a REX.W prefix here (AMD forces 32-bit).
 *
 * @param a_bRm         The ModR/M byte.
 * @param a_fnCImpl     C implementation taking (u16Sel, offSeg, enmEffOpSize).
 * @param a_fCImplExtra Additional IEM_CIMPL_F_XXX flags (e.g. BRANCH_STACK for CALLF).
 */
#define IEMOP_BODY_GRP5_FAR_EP(a_bRm, a_fnCImpl, a_fCImplExtra) \
    /* Registers? How?? */ \
    if (RT_LIKELY(IEM_IS_MODRM_MEM_MODE(a_bRm))) \
    { /* likely */ } \
    else \
        IEMOP_RAISE_INVALID_OPCODE_RET(); /* callf eax is not legal */ \
    \
    /* 64-bit mode: Default is 32-bit, but only intel respects a REX.W prefix. */ \
    /** @todo what does VIA do? */ \
    if (!IEM_IS_64BIT_CODE(pVCpu) || pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT || IEM_IS_GUEST_CPU_INTEL(pVCpu)) \
    { /* likely */ } \
    else \
        pVCpu->iem.s.enmEffOpSize = IEMMODE_32BIT; \
    \
    /* Far pointer loaded from memory. */ \
    switch (pVCpu->iem.s.enmEffOpSize) \
    { \
        case IEMMODE_16BIT: \
            IEM_MC_BEGIN(3, 1, 0, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint16_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_16BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
            IEM_MC_CALL_CIMPL_3(  IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_32BIT: \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_MIN_386, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint32_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_32BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
            IEM_MC_CALL_CIMPL_3(  IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        case IEMMODE_64BIT: \
            Assert(!IEM_IS_GUEST_CPU_AMD(pVCpu)); \
            IEM_MC_BEGIN(3, 1, IEM_MC_F_64BIT, 0); \
            IEM_MC_ARG(uint16_t,        u16Sel,                         0); \
            IEM_MC_ARG(uint64_t,        offSeg,                         1); \
            IEM_MC_ARG_CONST(IEMMODE,   enmEffOpSize, IEMMODE_64BIT,    2); \
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, a_bRm, 0); \
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
            IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
            IEM_MC_CALL_CIMPL_3(  IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | (a_fCImplExtra) \
                                | IEM_CIMPL_F_MODE /* no gates */, 0, \
                                a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
            IEM_MC_END(); \
            break; \
        \
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
    } do {} while (0)
14051
14052
14053/**
14054 * Opcode 0xff /3.
14055 * @param bRm The RM byte.
14056 */
/**
 * Opcode 0xff /3.
 *
 * CALLF Ep - far indirect call; shares the far-Ep body with JMPF, adding
 * the stack-branch flag since CALLF pushes a return address.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_callf, IEM_CIMPL_F_BRANCH_STACK);
}
14062
14063
14064/**
14065 * Opcode 0xff /4.
14066 * @param bRm The RM byte.
14067 */
/**
 * Opcode 0xff /4.
 *
 * JMP Ev - near indirect jump; the new RIP comes from a register or from
 * memory and is installed directly via IEM_MC_SET_RIP_*_AND_FINISH.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX();

    if (IEM_IS_MODRM_REG_MODE(bRm))
    {
        /* The new RIP is taken from a register. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1, 0, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_MIN_386, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1, IEM_MC_F_64BIT, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2, 0, 0);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16_AND_FINISH(u16Target);
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386, 0);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32_AND_FINISH(u32Target);
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64_AND_FINISH(u64Target);
                IEM_MC_END();
                break;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
14150
14151
14152/**
14153 * Opcode 0xff /5.
14154 * @param bRm The RM byte.
14155 */
/**
 * Opcode 0xff /5.
 *
 * JMPF Ep - far indirect jump; shares the far-Ep body with CALLF but
 * without the stack-branch flag (no return address is pushed).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    IEMOP_BODY_GRP5_FAR_EP(bRm, iemCImpl_FarJmp, 0);
}
14161
14162
14163/**
14164 * Opcode 0xff /6.
14165 * @param bRm The RM byte.
14166 */
/**
 * Opcode 0xff /6.
 *
 * PUSH Ev - register operands are delegated to the common push-GReg
 * worker; memory operands are fetched and pushed here.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if (IEM_IS_MODRM_REG_MODE(bRm))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, IEM_GET_MODRM_RM(pVCpu, bRm));

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2, 0, 0);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            /* 32-bit pushes don't exist in 64-bit mode, hence NOT_64BIT. */
            IEM_MC_BEGIN(0, 2, IEM_MC_F_MIN_386 | IEM_MC_F_NOT_64BIT, 0);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2, IEM_MC_F_64BIT, 0);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP_AND_FINISH();
            IEM_MC_END();
            break;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
14218
14219
14220/**
14221 * @opcode 0xff
14222 */
14223FNIEMOP_DEF(iemOp_Grp5)
14224{
14225 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
14226 switch (IEM_GET_MODRM_REG_8(bRm))
14227 {
14228 case 0:
14229 return FNIEMOP_CALL_1(iemOp_Grp5_inc_Ev, bRm);
14230 case 1:
14231 return FNIEMOP_CALL_1(iemOp_Grp5_dec_Ev, bRm);
14232 case 2:
14233 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
14234 case 3:
14235 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
14236 case 4:
14237 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
14238 case 5:
14239 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
14240 case 6:
14241 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
14242 case 7:
14243 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
14244 IEMOP_RAISE_INVALID_OPCODE_RET();
14245 }
14246 AssertFailedReturn(VERR_IEM_IPE_3);
14247}
14248
14249
14250
/**
 * The one-byte opcode dispatch table, indexed by the opcode byte
 * (0x00 thru 0xff).
 *
 * Each entry is the decode/emulation worker for that opcode.  Prefix
 * bytes get entries of their own (e.g. iemOp_seg_ES, iemOp_op_size,
 * iemOp_lock, iemOp_repne), as do the escape bytes (0x0f two-byte
 * escape, 0xd8-0xdf FPU escapes) and the ModR/M-dispatched groups
 * (Grp1-Grp5, Grp11).  Declared extern at the top of this file so
 * earlier workers can forward-reference it.
 */
14251const PFNIEMOP g_apfnOneByteMap[256] =
14252{
14253 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
14254 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
14255 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
14256 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
14257 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
14258 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
14259 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
14260 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
14261 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
14262 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
14263 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
14264 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
14265 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
14266 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
14267 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
14268 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
14269 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
14270 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
14271 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
14272 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
14273 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
14274 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
14275 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
14276 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
14277 /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
14278 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
14279 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
14280 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
14281 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
14282 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
14283 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
14284 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
14285 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
14286 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
14287 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
14288 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
14289 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
14290 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
14291 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
14292 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
14293 /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
14294 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
14295 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
14296 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
14297 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
14298 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
14299 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
14300 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
14301 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
14302 /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
14303 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
14304 /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
14305 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
14306 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
14307 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
14308 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
14309 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
14310 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
14311 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
14312 /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
14313 /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
14314 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
14315 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
14316 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
14317};
14318
14319
14320/** @} */
14321
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette