VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/target-x86/IEMInternal-x86.h @ 108226

Last change on this file since 108226 was 108220, checked in by vboxsync, 3 months ago

VMM/IEM: Splitting up IEMAll.cpp. jiraref:VBP-1531

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 212.3 KB
1/* $Id: IEMInternal-x86.h 108220 2025-02-14 11:40:20Z vboxsync $ */
2/** @file
3 * IEM - Internal header file, x86 target specifics.
4 */
5
6/*
7 * Copyright (C) 2011-2024 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28#ifndef VMM_INCLUDED_SRC_VMMAll_target_x86_IEMInternal_x86_h
29#define VMM_INCLUDED_SRC_VMMAll_target_x86_IEMInternal_x86_h
30#ifndef RT_WITHOUT_PRAGMA_ONCE
31# pragma once
32#endif
33
34
35RT_C_DECLS_BEGIN
36
37
38/** @defgroup grp_iem_int_x86 X86 Target Internals
39 * @ingroup grp_iem_int
40 * @internal
41 * @{
42 */
43
44
45/** @name Prefix constants (IEMCPU::fPrefixes)
46 * @note x86 specific
47 * @{ */
48#define IEM_OP_PRF_SEG_CS RT_BIT_32(0) /**< CS segment prefix (0x2e). */
49#define IEM_OP_PRF_SEG_SS RT_BIT_32(1) /**< SS segment prefix (0x36). */
50#define IEM_OP_PRF_SEG_DS RT_BIT_32(2) /**< DS segment prefix (0x3e). */
51#define IEM_OP_PRF_SEG_ES RT_BIT_32(3) /**< ES segment prefix (0x26). */
52#define IEM_OP_PRF_SEG_FS RT_BIT_32(4) /**< FS segment prefix (0x64). */
53#define IEM_OP_PRF_SEG_GS RT_BIT_32(5) /**< GS segment prefix (0x65). */
54#define IEM_OP_PRF_SEG_MASK UINT32_C(0x3f)
55
56#define IEM_OP_PRF_SIZE_OP RT_BIT_32(8) /**< Operand size prefix (0x66). */
57#define IEM_OP_PRF_SIZE_REX_W RT_BIT_32(9) /**< REX.W prefix (0x48-0x4f). */
58#define IEM_OP_PRF_SIZE_ADDR RT_BIT_32(10) /**< Address size prefix (0x67). */
59
60#define IEM_OP_PRF_LOCK RT_BIT_32(16) /**< Lock prefix (0xf0). */
61#define IEM_OP_PRF_REPNZ RT_BIT_32(17) /**< Repeat-not-zero prefix (0xf2). */
62#define IEM_OP_PRF_REPZ RT_BIT_32(18) /**< Repeat-if-zero prefix (0xf3). */
63
64#define IEM_OP_PRF_REX RT_BIT_32(24) /**< Any REX prefix (0x40-0x4f). */
65#define IEM_OP_PRF_REX_B RT_BIT_32(25) /**< REX.B prefix (0x41,0x43,0x45,0x47,0x49,0x4b,0x4d,0x4f). */
66#define IEM_OP_PRF_REX_X RT_BIT_32(26) /**< REX.X prefix (0x42,0x43,0x46,0x47,0x4a,0x4b,0x4e,0x4f). */
67#define IEM_OP_PRF_REX_R RT_BIT_32(27) /**< REX.R prefix (0x44,0x45,0x46,0x47,0x4c,0x4d,0x4e,0x4f). */
68/** Mask with all the REX prefix flags.
69 * This is generally for use when needing to undo the REX prefixes when they
70 * are followed by legacy prefixes and therefore do not immediately precede
71 * the first opcode byte.
72 * For testing whether any REX prefix is present, use IEM_OP_PRF_REX instead. */
73#define IEM_OP_PRF_REX_MASK (IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W )
74
75#define IEM_OP_PRF_VEX RT_BIT_32(28) /**< Indicates VEX prefix. */
76#define IEM_OP_PRF_EVEX RT_BIT_32(29) /**< Indicates EVEX prefix. */
77#define IEM_OP_PRF_XOP RT_BIT_32(30) /**< Indicates XOP prefix. */
78/** @} */
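/*
 * A minimal sketch of how the prefix bits above are meant to be accumulated while
 * decoding prefix bytes (hypothetical helper; the real decoder keeps this word in
 * IEMCPU::fPrefixes and handles more prefix bytes than shown here):
 */
static uint32_t iemExampleAccumulatePrefixByte(uint32_t fPrefixes, uint8_t bPrefix)
{
    switch (bPrefix)
    {
        case 0x2e: return fPrefixes | IEM_OP_PRF_SEG_CS;    /* CS segment override */
        case 0x66: return fPrefixes | IEM_OP_PRF_SIZE_OP;   /* operand size */
        case 0x67: return fPrefixes | IEM_OP_PRF_SIZE_ADDR; /* address size */
        case 0xf0: return fPrefixes | IEM_OP_PRF_LOCK;      /* lock */
        case 0xf2: return fPrefixes | IEM_OP_PRF_REPNZ;     /* repne/repnz */
        case 0xf3: return fPrefixes | IEM_OP_PRF_REPZ;      /* rep/repe/repz */
        default:   return fPrefixes;                        /* not a prefix handled in this sketch */
    }
}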
79
80/** @name IEMOPFORM_XXX - Opcode forms
81 * @note These are ORed together with IEMOPHINT_XXX.
82 * @note x86 specific
83 * @{ */
84/** ModR/M: reg, r/m */
85#define IEMOPFORM_RM 0
86/** ModR/M: reg, r/m (register) */
87#define IEMOPFORM_RM_REG (IEMOPFORM_RM | IEMOPFORM_MOD3)
88/** ModR/M: reg, r/m (memory) */
89#define IEMOPFORM_RM_MEM (IEMOPFORM_RM | IEMOPFORM_NOT_MOD3)
90/** ModR/M: reg, r/m, imm */
91#define IEMOPFORM_RMI 1
92/** ModR/M: reg, r/m (register), imm */
93#define IEMOPFORM_RMI_REG (IEMOPFORM_RMI | IEMOPFORM_MOD3)
94/** ModR/M: reg, r/m (memory), imm */
95#define IEMOPFORM_RMI_MEM (IEMOPFORM_RMI | IEMOPFORM_NOT_MOD3)
96/** ModR/M: reg, r/m, xmm0 */
97#define IEMOPFORM_RM0 2
98/** ModR/M: reg, r/m (register), xmm0 */
99#define IEMOPFORM_RM0_REG (IEMOPFORM_RM0 | IEMOPFORM_MOD3)
100/** ModR/M: reg, r/m (memory), xmm0 */
101#define IEMOPFORM_RM0_MEM (IEMOPFORM_RM0 | IEMOPFORM_NOT_MOD3)
102/** ModR/M: r/m, reg */
103#define IEMOPFORM_MR 3
104/** ModR/M: r/m (register), reg */
105#define IEMOPFORM_MR_REG (IEMOPFORM_MR | IEMOPFORM_MOD3)
106/** ModR/M: r/m (memory), reg */
107#define IEMOPFORM_MR_MEM (IEMOPFORM_MR | IEMOPFORM_NOT_MOD3)
108/** ModR/M: r/m, reg, imm */
109#define IEMOPFORM_MRI 4
110/** ModR/M: r/m (register), reg, imm */
111#define IEMOPFORM_MRI_REG (IEMOPFORM_MRI | IEMOPFORM_MOD3)
112/** ModR/M: r/m (memory), reg, imm */
113#define IEMOPFORM_MRI_MEM (IEMOPFORM_MRI | IEMOPFORM_NOT_MOD3)
114/** ModR/M: r/m only */
115#define IEMOPFORM_M 5
116/** ModR/M: r/m only (register). */
117#define IEMOPFORM_M_REG (IEMOPFORM_M | IEMOPFORM_MOD3)
118/** ModR/M: r/m only (memory). */
119#define IEMOPFORM_M_MEM (IEMOPFORM_M | IEMOPFORM_NOT_MOD3)
120/** ModR/M: r/m, imm */
121#define IEMOPFORM_MI 6
122/** ModR/M: r/m (register), imm */
123#define IEMOPFORM_MI_REG (IEMOPFORM_MI | IEMOPFORM_MOD3)
124/** ModR/M: r/m (memory), imm */
125#define IEMOPFORM_MI_MEM (IEMOPFORM_MI | IEMOPFORM_NOT_MOD3)
126/** ModR/M: r/m, 1 (shift and rotate instructions) */
127#define IEMOPFORM_M1 7
128/** ModR/M: r/m (register), 1. */
129#define IEMOPFORM_M1_REG (IEMOPFORM_M1 | IEMOPFORM_MOD3)
130/** ModR/M: r/m (memory), 1. */
131#define IEMOPFORM_M1_MEM (IEMOPFORM_M1 | IEMOPFORM_NOT_MOD3)
132/** ModR/M: r/m, CL (shift and rotate instructions)
133 * @todo This should just have been a generic fixed register, but the python
134 * code needs more convincing. */
135#define IEMOPFORM_M_CL 8
136/** ModR/M: r/m (register), CL. */
137#define IEMOPFORM_M_CL_REG (IEMOPFORM_M_CL | IEMOPFORM_MOD3)
138/** ModR/M: r/m (memory), CL. */
139#define IEMOPFORM_M_CL_MEM (IEMOPFORM_M_CL | IEMOPFORM_NOT_MOD3)
140/** ModR/M: reg only */
141#define IEMOPFORM_R 9
142
143/** VEX+ModR/M: reg, r/m */
144#define IEMOPFORM_VEX_RM 16
145/** VEX+ModR/M: reg, r/m (register) */
146#define IEMOPFORM_VEX_RM_REG (IEMOPFORM_VEX_RM | IEMOPFORM_MOD3)
147/** VEX+ModR/M: reg, r/m (memory) */
148#define IEMOPFORM_VEX_RM_MEM (IEMOPFORM_VEX_RM | IEMOPFORM_NOT_MOD3)
149/** VEX+ModR/M: r/m, reg */
150#define IEMOPFORM_VEX_MR 17
151/** VEX+ModR/M: r/m (register), reg */
152#define IEMOPFORM_VEX_MR_REG (IEMOPFORM_VEX_MR | IEMOPFORM_MOD3)
153/** VEX+ModR/M: r/m (memory), reg */
154#define IEMOPFORM_VEX_MR_MEM (IEMOPFORM_VEX_MR | IEMOPFORM_NOT_MOD3)
155/** VEX+ModR/M: r/m, reg, imm8 */
156#define IEMOPFORM_VEX_MRI 18
157/** VEX+ModR/M: r/m (register), reg, imm8 */
158#define IEMOPFORM_VEX_MRI_REG (IEMOPFORM_VEX_MRI | IEMOPFORM_MOD3)
159/** VEX+ModR/M: r/m (memory), reg, imm8 */
160#define IEMOPFORM_VEX_MRI_MEM (IEMOPFORM_VEX_MRI | IEMOPFORM_NOT_MOD3)
161/** VEX+ModR/M: r/m only */
162#define IEMOPFORM_VEX_M 19
163/** VEX+ModR/M: r/m only (register). */
164#define IEMOPFORM_VEX_M_REG (IEMOPFORM_VEX_M | IEMOPFORM_MOD3)
165/** VEX+ModR/M: r/m only (memory). */
166#define IEMOPFORM_VEX_M_MEM (IEMOPFORM_VEX_M | IEMOPFORM_NOT_MOD3)
167/** VEX+ModR/M: reg only */
168#define IEMOPFORM_VEX_R 20
169/** VEX+ModR/M: reg, vvvv, r/m */
170#define IEMOPFORM_VEX_RVM 21
171/** VEX+ModR/M: reg, vvvv, r/m (register). */
172#define IEMOPFORM_VEX_RVM_REG (IEMOPFORM_VEX_RVM | IEMOPFORM_MOD3)
173/** VEX+ModR/M: reg, vvvv, r/m (memory). */
174#define IEMOPFORM_VEX_RVM_MEM (IEMOPFORM_VEX_RVM | IEMOPFORM_NOT_MOD3)
175/** VEX+ModR/M: reg, vvvv, r/m, imm */
176#define IEMOPFORM_VEX_RVMI 22
177/** VEX+ModR/M: reg, vvvv, r/m (register), imm. */
178#define IEMOPFORM_VEX_RVMI_REG (IEMOPFORM_VEX_RVMI | IEMOPFORM_MOD3)
179/** VEX+ModR/M: reg, vvvv, r/m (memory), imm. */
180#define IEMOPFORM_VEX_RVMI_MEM (IEMOPFORM_VEX_RVMI | IEMOPFORM_NOT_MOD3)
181/** VEX+ModR/M: reg, vvvv, r/m, imm(reg) */
182#define IEMOPFORM_VEX_RVMR 23
183/** VEX+ModR/M: reg, vvvv, r/m (register), imm(reg). */
184#define IEMOPFORM_VEX_RVMR_REG (IEMOPFORM_VEX_RVMR | IEMOPFORM_MOD3)
185/** VEX+ModR/M: reg, vvvv, r/m (memory), imm(reg). */
186#define IEMOPFORM_VEX_RVMR_MEM (IEMOPFORM_VEX_RVMR | IEMOPFORM_NOT_MOD3)
187/** VEX+ModR/M: reg, r/m, vvvv */
188#define IEMOPFORM_VEX_RMV 24
189/** VEX+ModR/M: reg, r/m, vvvv (register). */
190#define IEMOPFORM_VEX_RMV_REG (IEMOPFORM_VEX_RMV | IEMOPFORM_MOD3)
191/** VEX+ModR/M: reg, r/m, vvvv (memory). */
192#define IEMOPFORM_VEX_RMV_MEM (IEMOPFORM_VEX_RMV | IEMOPFORM_NOT_MOD3)
193/** VEX+ModR/M: reg, r/m, imm8 */
194#define IEMOPFORM_VEX_RMI 25
195/** VEX+ModR/M: reg, r/m, imm8 (register). */
196#define IEMOPFORM_VEX_RMI_REG (IEMOPFORM_VEX_RMI | IEMOPFORM_MOD3)
197/** VEX+ModR/M: reg, r/m, imm8 (memory). */
198#define IEMOPFORM_VEX_RMI_MEM (IEMOPFORM_VEX_RMI | IEMOPFORM_NOT_MOD3)
199/** VEX+ModR/M: r/m, vvvv, reg */
200#define IEMOPFORM_VEX_MVR 26
201/** VEX+ModR/M: r/m, vvvv, reg (register) */
202#define IEMOPFORM_VEX_MVR_REG (IEMOPFORM_VEX_MVR | IEMOPFORM_MOD3)
203/** VEX+ModR/M: r/m, vvvv, reg (memory) */
204#define IEMOPFORM_VEX_MVR_MEM (IEMOPFORM_VEX_MVR | IEMOPFORM_NOT_MOD3)
205/** VEX+ModR/M+/n: vvvv, r/m */
206#define IEMOPFORM_VEX_VM 27
207/** VEX+ModR/M+/n: vvvv, r/m (register) */
208#define IEMOPFORM_VEX_VM_REG (IEMOPFORM_VEX_VM | IEMOPFORM_MOD3)
209/** VEX+ModR/M+/n: vvvv, r/m (memory) */
210#define IEMOPFORM_VEX_VM_MEM (IEMOPFORM_VEX_VM | IEMOPFORM_NOT_MOD3)
211/** VEX+ModR/M+/n: vvvv, r/m, imm8 */
212#define IEMOPFORM_VEX_VMI 28
213/** VEX+ModR/M+/n: vvvv, r/m, imm8 (register) */
214#define IEMOPFORM_VEX_VMI_REG (IEMOPFORM_VEX_VMI | IEMOPFORM_MOD3)
215/** VEX+ModR/M+/n: vvvv, r/m, imm8 (memory) */
216#define IEMOPFORM_VEX_VMI_MEM (IEMOPFORM_VEX_VMI | IEMOPFORM_NOT_MOD3)
217
218/** Fixed register instruction, no R/M. */
219#define IEMOPFORM_FIXED 32
220
221/** The r/m is a register. */
222#define IEMOPFORM_MOD3 RT_BIT_32(8)
223/** The r/m is a memory access. */
224#define IEMOPFORM_NOT_MOD3 RT_BIT_32(9)
225/** @} */
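/*
 * The _REG/_MEM variants above are just the base form ORed with IEMOPFORM_MOD3 or
 * IEMOPFORM_NOT_MOD3.  A rough sketch of how a decoder could derive the variant from
 * the ModR/M byte (hypothetical helper; the real instruction tables store the
 * IEMOPFORM_XXX value together with IEMOPHINT_XXX bits):
 */
static uint32_t iemExampleClassifyForm(uint32_t fForm, uint8_t bModRm)
{
    if ((bModRm & 0xc0) == 0xc0)        /* mod == 3: the r/m field names a register */
        return fForm | IEMOPFORM_MOD3;
    return fForm | IEMOPFORM_NOT_MOD3;  /* mod != 3: the r/m field encodes a memory operand */
}
/* e.g. iemExampleClassifyForm(IEMOPFORM_RM, bModRm) yields IEMOPFORM_RM_REG or IEMOPFORM_RM_MEM. */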
226
227/** @name IEMOPHINT_XXX - Additional Opcode Hints
228 * @note These are ORed together with IEMOPFORM_XXX.
229 * @note x86 specific
230 * @{ */
231/** Ignores the operand size prefix (66h). */
232#define IEMOPHINT_IGNORES_OZ_PFX RT_BIT_32(10)
233/** Ignores REX.W (aka WIG). */
234#define IEMOPHINT_IGNORES_REXW RT_BIT_32(11)
235/** Both the operand size prefixes (66h + REX.W) are ignored. */
236#define IEMOPHINT_IGNORES_OP_SIZES (IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW)
237/** Allowed with the lock prefix. */
238#define IEMOPHINT_LOCK_ALLOWED RT_BIT_32(11)
239/** The VEX.L value is ignored (aka LIG). */
240#define IEMOPHINT_VEX_L_IGNORED RT_BIT_32(12)
241/** The VEX.L value must be zero (i.e. 128-bit width only). */
242#define IEMOPHINT_VEX_L_ZERO RT_BIT_32(13)
243/** The VEX.L value must be one (i.e. 256-bit width only). */
244#define IEMOPHINT_VEX_L_ONE RT_BIT_32(14)
245/** The VEX.V value must be zero. */
246#define IEMOPHINT_VEX_V_ZERO RT_BIT_32(15)
247/** The REX.W/VEX.W value must be zero. */
248#define IEMOPHINT_REX_W_ZERO RT_BIT_32(16)
249#define IEMOPHINT_VEX_W_ZERO IEMOPHINT_REX_W_ZERO
250/** The REX.W/VEX.W value must be one. */
251#define IEMOPHINT_REX_W_ONE RT_BIT_32(17)
252#define IEMOPHINT_VEX_W_ONE IEMOPHINT_REX_W_ONE
253
254/** Hint to IEMAllInstructionPython.py that this macro should be skipped. */
255#define IEMOPHINT_SKIP_PYTHON RT_BIT_32(31)
256/** @} */
257
258/**
259 * Possible hardware task switch sources - iemTaskSwitch(), iemVmxVmexitTaskSwitch().
260 * @note x86 specific
261 */
262typedef enum IEMTASKSWITCH
263{
264 /** Task switch caused by an interrupt/exception. */
265 IEMTASKSWITCH_INT_XCPT = 1,
266 /** Task switch caused by a far CALL. */
267 IEMTASKSWITCH_CALL,
268 /** Task switch caused by a far JMP. */
269 IEMTASKSWITCH_JUMP,
270 /** Task switch caused by an IRET. */
271 IEMTASKSWITCH_IRET
272} IEMTASKSWITCH;
273AssertCompileSize(IEMTASKSWITCH, 4);
274
275/**
276 * Possible CrX load (write) sources - iemCImpl_load_CrX().
277 * @note x86 specific
278 */
279typedef enum IEMACCESSCRX
280{
281 /** CrX access caused by 'mov crX' instruction. */
282 IEMACCESSCRX_MOV_CRX,
283 /** CrX (CR0) write caused by 'lmsw' instruction. */
284 IEMACCESSCRX_LMSW,
285 /** CrX (CR0) write caused by 'clts' instruction. */
286 IEMACCESSCRX_CLTS,
287 /** CrX (CR0) read caused by 'smsw' instruction. */
288 IEMACCESSCRX_SMSW
289} IEMACCESSCRX;
290
291#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
292/** @name IEM_SLAT_FAIL_XXX - Second-level address translation failure information.
293 *
294 * These flags provide further context to SLAT page-walk failures that could not be
295 * determined by PGM (e.g., PGM is not privy to memory access permissions).
296 *
297 * @{
298 */
299/** Translating a nested-guest linear address failed accessing a nested-guest
300 * physical address. */
301# define IEM_SLAT_FAIL_LINEAR_TO_PHYS_ADDR RT_BIT_32(0)
302/** Translating a nested-guest linear address failed accessing a
303 * paging-structure entry or updating accessed/dirty bits. */
304# define IEM_SLAT_FAIL_LINEAR_TO_PAGE_TABLE RT_BIT_32(1)
305/** @} */
306
307DECLCALLBACK(FNPGMPHYSHANDLER) iemVmxApicAccessPageHandler;
308# ifndef IN_RING3
309DECLCALLBACK(FNPGMRZPHYSPFHANDLER) iemVmxApicAccessPagePfHandler;
310# endif
311#endif
312
313/**
314 * Indicates to the verifier that the given flag set is undefined.
315 *
316 * Can be invoked again to add more flags.
317 *
318 * This is a NOOP if the verifier isn't compiled in.
319 *
320 * @note We're temporarily keeping this until code is converted to new
321 * disassembler style opcode handling.
322 */
323#define IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fEfl) do { } while (0)
324
325
326/** Defined in IEMAllAImplC.cpp but also used by IEMAllAImplA.asm. */
327RT_C_DECLS_BEGIN
328extern uint8_t const g_afParity[256];
329RT_C_DECLS_END
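/*
 * The parity table maps each possible result byte to the value of the parity flag.
 * A sketch of how such a table could be filled in, assuming each entry holds the
 * EFLAGS PF bit (bit 2, value 4) when the byte contains an even number of set bits
 * and 0 otherwise (hypothetical init function, not part of this header):
 */
static void iemExampleInitParityTable(uint8_t pafParity[256])
{
    for (unsigned i = 0; i < 256; i++)
    {
        unsigned cBits = 0;
        for (unsigned iBit = 0; iBit < 8; iBit++)   /* count the set bits in the byte */
            cBits += (i >> iBit) & 1;
        pafParity[i] = (cBits & 1) ? 0 : 4;         /* even parity -> PF (EFLAGS bit 2) */
    }
}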
330
331
332/** @name Arithmetic assignment operations on bytes (binary).
333 * @{ */
334typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU8, (uint32_t fEFlagsIn, uint8_t *pu8Dst, uint8_t u8Src));
335typedef FNIEMAIMPLBINU8 *PFNIEMAIMPLBINU8;
336FNIEMAIMPLBINU8 iemAImpl_add_u8, iemAImpl_add_u8_locked;
337FNIEMAIMPLBINU8 iemAImpl_adc_u8, iemAImpl_adc_u8_locked;
338FNIEMAIMPLBINU8 iemAImpl_sub_u8, iemAImpl_sub_u8_locked;
339FNIEMAIMPLBINU8 iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked;
340FNIEMAIMPLBINU8 iemAImpl_or_u8, iemAImpl_or_u8_locked;
341FNIEMAIMPLBINU8 iemAImpl_xor_u8, iemAImpl_xor_u8_locked;
342FNIEMAIMPLBINU8 iemAImpl_and_u8, iemAImpl_and_u8_locked;
343/** @} */
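/*
 * A sketch of what a worker with the FNIEMAIMPLBINU8 shape does, using AND as the
 * example: the destination is updated through the pointer and the new EFLAGS value is
 * returned.  This is a hypothetical illustration; the real iemAImpl_and_u8 is
 * implemented elsewhere (assembly and C fallbacks).
 */
static uint32_t iemExampleAndU8(uint32_t fEFlagsIn, uint8_t *pu8Dst, uint8_t u8Src)
{
    uint8_t const uResult = (uint8_t)(*pu8Dst & u8Src);
    *pu8Dst = uResult;

    /* AND clears CF (bit 0) and OF (bit 11) and sets SF/ZF/PF from the result; AF is undefined. */
    uint32_t fEFlags = fEFlagsIn & ~UINT32_C(0x08c5);   /* clear OF, SF, ZF, PF, CF */
    if (!uResult)
        fEFlags |= UINT32_C(0x0040);                    /* ZF */
    if (uResult & 0x80)
        fEFlags |= UINT32_C(0x0080);                    /* SF */
    fEFlags |= g_afParity[uResult];                     /* PF via the table above */
    return fEFlags;
}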
344
345/** @name Arithmetic assignment operations on words (binary).
346 * @{ */
347typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU16, (uint32_t fEFlagsIn, uint16_t *pu16Dst, uint16_t u16Src));
348typedef FNIEMAIMPLBINU16 *PFNIEMAIMPLBINU16;
349FNIEMAIMPLBINU16 iemAImpl_add_u16, iemAImpl_add_u16_locked;
350FNIEMAIMPLBINU16 iemAImpl_adc_u16, iemAImpl_adc_u16_locked;
351FNIEMAIMPLBINU16 iemAImpl_sub_u16, iemAImpl_sub_u16_locked;
352FNIEMAIMPLBINU16 iemAImpl_sbb_u16, iemAImpl_sbb_u16_locked;
353FNIEMAIMPLBINU16 iemAImpl_or_u16, iemAImpl_or_u16_locked;
354FNIEMAIMPLBINU16 iemAImpl_xor_u16, iemAImpl_xor_u16_locked;
355FNIEMAIMPLBINU16 iemAImpl_and_u16, iemAImpl_and_u16_locked;
356/** @} */
357
358
359/** @name Arithmetic assignment operations on double words (binary).
360 * @{ */
361typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU32, (uint32_t fEFlagsIn, uint32_t *pu32Dst, uint32_t u32Src));
362typedef FNIEMAIMPLBINU32 *PFNIEMAIMPLBINU32;
363FNIEMAIMPLBINU32 iemAImpl_add_u32, iemAImpl_add_u32_locked;
364FNIEMAIMPLBINU32 iemAImpl_adc_u32, iemAImpl_adc_u32_locked;
365FNIEMAIMPLBINU32 iemAImpl_sub_u32, iemAImpl_sub_u32_locked;
366FNIEMAIMPLBINU32 iemAImpl_sbb_u32, iemAImpl_sbb_u32_locked;
367FNIEMAIMPLBINU32 iemAImpl_or_u32, iemAImpl_or_u32_locked;
368FNIEMAIMPLBINU32 iemAImpl_xor_u32, iemAImpl_xor_u32_locked;
369FNIEMAIMPLBINU32 iemAImpl_and_u32, iemAImpl_and_u32_locked;
370FNIEMAIMPLBINU32 iemAImpl_blsi_u32, iemAImpl_blsi_u32_fallback;
371FNIEMAIMPLBINU32 iemAImpl_blsr_u32, iemAImpl_blsr_u32_fallback;
372FNIEMAIMPLBINU32 iemAImpl_blsmsk_u32, iemAImpl_blsmsk_u32_fallback;
373/** @} */
374
375/** @name Arithmetic assignment operations on quad words (binary).
376 * @{ */
377typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU64, (uint32_t fEFlagsIn, uint64_t *pu64Dst, uint64_t u64Src));
378typedef FNIEMAIMPLBINU64 *PFNIEMAIMPLBINU64;
379FNIEMAIMPLBINU64 iemAImpl_add_u64, iemAImpl_add_u64_locked;
380FNIEMAIMPLBINU64 iemAImpl_adc_u64, iemAImpl_adc_u64_locked;
381FNIEMAIMPLBINU64 iemAImpl_sub_u64, iemAImpl_sub_u64_locked;
382FNIEMAIMPLBINU64 iemAImpl_sbb_u64, iemAImpl_sbb_u64_locked;
383FNIEMAIMPLBINU64 iemAImpl_or_u64, iemAImpl_or_u64_locked;
384FNIEMAIMPLBINU64 iemAImpl_xor_u64, iemAImpl_xor_u64_locked;
385FNIEMAIMPLBINU64 iemAImpl_and_u64, iemAImpl_and_u64_locked;
386FNIEMAIMPLBINU64 iemAImpl_blsi_u64, iemAImpl_blsi_u64_fallback;
387FNIEMAIMPLBINU64 iemAImpl_blsr_u64, iemAImpl_blsr_u64_fallback;
388FNIEMAIMPLBINU64 iemAImpl_blsmsk_u64, iemAImpl_blsmsk_u64_fallback;
389/** @} */
390
391typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU8, (uint32_t fEFlagsIn, uint8_t const *pu8Dst, uint8_t u8Src));
392typedef FNIEMAIMPLBINROU8 *PFNIEMAIMPLBINROU8;
393typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU16,(uint32_t fEFlagsIn, uint16_t const *pu16Dst, uint16_t u16Src));
394typedef FNIEMAIMPLBINROU16 *PFNIEMAIMPLBINROU16;
395typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU32,(uint32_t fEFlagsIn, uint32_t const *pu32Dst, uint32_t u32Src));
396typedef FNIEMAIMPLBINROU32 *PFNIEMAIMPLBINROU32;
397typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU64,(uint32_t fEFlagsIn, uint64_t const *pu64Dst, uint64_t u64Src));
398typedef FNIEMAIMPLBINROU64 *PFNIEMAIMPLBINROU64;
399
400/** @name Compare operations (thrown in with the binary ops).
401 * @{ */
402FNIEMAIMPLBINROU8 iemAImpl_cmp_u8;
403FNIEMAIMPLBINROU16 iemAImpl_cmp_u16;
404FNIEMAIMPLBINROU32 iemAImpl_cmp_u32;
405FNIEMAIMPLBINROU64 iemAImpl_cmp_u64;
406/** @} */
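/*
 * The read-only (FNIEMAIMPLBINROU*) shape exists because CMP and TEST only produce
 * flags.  Conceptually CMP is a SUB whose result is thrown away; a sketch in terms of
 * the workers declared above (hypothetical helper):
 */
static uint32_t iemExampleCmpU8(uint32_t fEFlagsIn, uint8_t const *pu8Dst, uint8_t u8Src)
{
    uint8_t uTmp = *pu8Dst;                             /* work on a copy, nothing is written back */
    return iemAImpl_sub_u8(fEFlagsIn, &uTmp, u8Src);    /* flags exactly as for SUB dst, src */
}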
407
408/** @name Test operations (thrown in with the binary ops).
409 * @{ */
410FNIEMAIMPLBINROU8 iemAImpl_test_u8;
411FNIEMAIMPLBINROU16 iemAImpl_test_u16;
412FNIEMAIMPLBINROU32 iemAImpl_test_u32;
413FNIEMAIMPLBINROU64 iemAImpl_test_u64;
414/** @} */
415
416/** @name Bit operations (thrown in with the binary ops).
417 * @{ */
418FNIEMAIMPLBINROU16 iemAImpl_bt_u16;
419FNIEMAIMPLBINROU32 iemAImpl_bt_u32;
420FNIEMAIMPLBINROU64 iemAImpl_bt_u64;
421FNIEMAIMPLBINU16 iemAImpl_btc_u16, iemAImpl_btc_u16_locked;
422FNIEMAIMPLBINU32 iemAImpl_btc_u32, iemAImpl_btc_u32_locked;
423FNIEMAIMPLBINU64 iemAImpl_btc_u64, iemAImpl_btc_u64_locked;
424FNIEMAIMPLBINU16 iemAImpl_btr_u16, iemAImpl_btr_u16_locked;
425FNIEMAIMPLBINU32 iemAImpl_btr_u32, iemAImpl_btr_u32_locked;
426FNIEMAIMPLBINU64 iemAImpl_btr_u64, iemAImpl_btr_u64_locked;
427FNIEMAIMPLBINU16 iemAImpl_bts_u16, iemAImpl_bts_u16_locked;
428FNIEMAIMPLBINU32 iemAImpl_bts_u32, iemAImpl_bts_u32_locked;
429FNIEMAIMPLBINU64 iemAImpl_bts_u64, iemAImpl_bts_u64_locked;
430/** @} */
431
432/** @name Arithmetic three operand operations on double words (binary).
433 * @{ */
434typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU32, (uint32_t *pu32Dst, uint32_t u32Src1, uint32_t u32Src2, uint32_t *pEFlags));
435typedef FNIEMAIMPLBINVEXU32 *PFNIEMAIMPLBINVEXU32;
436FNIEMAIMPLBINVEXU32 iemAImpl_andn_u32, iemAImpl_andn_u32_fallback;
437FNIEMAIMPLBINVEXU32 iemAImpl_bextr_u32, iemAImpl_bextr_u32_fallback;
438FNIEMAIMPLBINVEXU32 iemAImpl_bzhi_u32, iemAImpl_bzhi_u32_fallback;
439/** @} */
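/*
 * A sketch of the three-operand (VEX/BMI) worker shape using ANDN: the result is the
 * inverted first source ANDed with the second; BMI1 clears CF/OF and sets ZF/SF from
 * the result, while AF/PF are undefined (hypothetical illustration of the
 * iemAImpl_andn_u32 semantics):
 */
static void iemExampleAndnU32(uint32_t *pu32Dst, uint32_t u32Src1, uint32_t u32Src2, uint32_t *pEFlags)
{
    uint32_t const uResult = ~u32Src1 & u32Src2;
    *pu32Dst = uResult;

    uint32_t fEFlags = *pEFlags & ~UINT32_C(0x08c1);    /* clear OF, SF, ZF, CF */
    if (!uResult)
        fEFlags |= UINT32_C(0x0040);                    /* ZF */
    if (uResult & UINT32_C(0x80000000))
        fEFlags |= UINT32_C(0x0080);                    /* SF */
    *pEFlags = fEFlags;
}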
440
441/** @name Arithmetic three operand operations on quad words (binary).
442 * @{ */
443typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU64, (uint64_t *pu64Dst, uint64_t u64Src1, uint64_t u64Src2, uint32_t *pEFlags));
444typedef FNIEMAIMPLBINVEXU64 *PFNIEMAIMPLBINVEXU64;
445FNIEMAIMPLBINVEXU64 iemAImpl_andn_u64, iemAImpl_andn_u64_fallback;
446FNIEMAIMPLBINVEXU64 iemAImpl_bextr_u64, iemAImpl_bextr_u64_fallback;
447FNIEMAIMPLBINVEXU64 iemAImpl_bzhi_u64, iemAImpl_bzhi_u64_fallback;
448/** @} */
449
450/** @name Arithmetic three operand operations on double words w/o EFLAGS (binary).
451 * @{ */
452typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU32NOEFL, (uint32_t *pu32Dst, uint32_t u32Src1, uint32_t u32Src2));
453typedef FNIEMAIMPLBINVEXU32NOEFL *PFNIEMAIMPLBINVEXU32NOEFL;
454FNIEMAIMPLBINVEXU32NOEFL iemAImpl_pdep_u32, iemAImpl_pdep_u32_fallback;
455FNIEMAIMPLBINVEXU32NOEFL iemAImpl_pext_u32, iemAImpl_pext_u32_fallback;
456FNIEMAIMPLBINVEXU32NOEFL iemAImpl_sarx_u32, iemAImpl_sarx_u32_fallback;
457FNIEMAIMPLBINVEXU32NOEFL iemAImpl_shlx_u32, iemAImpl_shlx_u32_fallback;
458FNIEMAIMPLBINVEXU32NOEFL iemAImpl_shrx_u32, iemAImpl_shrx_u32_fallback;
459FNIEMAIMPLBINVEXU32NOEFL iemAImpl_rorx_u32;
460/** @} */
461
462/** @name Arithmetic three operand operations on quad words w/o EFLAGS (binary).
463 * @{ */
464typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU64NOEFL, (uint64_t *pu64Dst, uint64_t u64Src1, uint64_t u64Src2));
465typedef FNIEMAIMPLBINVEXU64NOEFL *PFNIEMAIMPLBINVEXU64NOEFL;
466FNIEMAIMPLBINVEXU64NOEFL iemAImpl_pdep_u64, iemAImpl_pdep_u64_fallback;
467FNIEMAIMPLBINVEXU64NOEFL iemAImpl_pext_u64, iemAImpl_pext_u64_fallback;
468FNIEMAIMPLBINVEXU64NOEFL iemAImpl_sarx_u64, iemAImpl_sarx_u64_fallback;
469FNIEMAIMPLBINVEXU64NOEFL iemAImpl_shlx_u64, iemAImpl_shlx_u64_fallback;
470FNIEMAIMPLBINVEXU64NOEFL iemAImpl_shrx_u64, iemAImpl_shrx_u64_fallback;
471FNIEMAIMPLBINVEXU64NOEFL iemAImpl_rorx_u64;
472/** @} */
473
474/** @name MULX 32-bit and 64-bit.
475 * @{ */
476typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMULXVEXU32, (uint32_t *puDst1, uint32_t *puDst2, uint32_t uSrc1, uint32_t uSrc2));
477typedef FNIEMAIMPLMULXVEXU32 *PFNIEMAIMPLMULXVEXU32;
478FNIEMAIMPLMULXVEXU32 iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback;
479
480typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMULXVEXU64, (uint64_t *puDst1, uint64_t *puDst2, uint64_t uSrc1, uint64_t uSrc2));
481typedef FNIEMAIMPLMULXVEXU64 *PFNIEMAIMPLMULXVEXU64;
482FNIEMAIMPLMULXVEXU64 iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback;
483/** @} */
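/*
 * MULX performs an unsigned widening multiply without touching EFLAGS, which is why
 * the MULX worker type has two destination pointers and no flags parameter.  A sketch
 * of the 32-bit case, assuming puDst1 receives the high half and puDst2 the low half;
 * the actual operand ordering of iemAImpl_mulx_u32 is not spelled out here:
 */
static void iemExampleMulxU32(uint32_t *puDst1, uint32_t *puDst2, uint32_t uSrc1, uint32_t uSrc2)
{
    uint64_t const uProduct = (uint64_t)uSrc1 * uSrc2;  /* full 32x32 -> 64-bit product */
    *puDst1 = (uint32_t)(uProduct >> 32);               /* high half (assumed) */
    *puDst2 = (uint32_t)uProduct;                       /* low half (assumed) */
}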
484
485
486/** @name Exchange memory with register operations.
487 * @{ */
488IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u8_locked, (uint8_t *pu8Mem, uint8_t *pu8Reg));
489IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u16_locked,(uint16_t *pu16Mem, uint16_t *pu16Reg));
490IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u32_locked,(uint32_t *pu32Mem, uint32_t *pu32Reg));
491IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u64_locked,(uint64_t *pu64Mem, uint64_t *pu64Reg));
492IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u8_unlocked, (uint8_t *pu8Mem, uint8_t *pu8Reg));
493IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u16_unlocked,(uint16_t *pu16Mem, uint16_t *pu16Reg));
494IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u32_unlocked,(uint32_t *pu32Mem, uint32_t *pu32Reg));
495IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u64_unlocked,(uint64_t *pu64Mem, uint64_t *pu64Reg));
496/** @} */
497
498/** @name Exchange and add operations.
499 * @{ */
500IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u8, (uint8_t *pu8Dst, uint8_t *pu8Reg, uint32_t *pEFlags));
501IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u16,(uint16_t *pu16Dst, uint16_t *pu16Reg, uint32_t *pEFlags));
502IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u32,(uint32_t *pu32Dst, uint32_t *pu32Reg, uint32_t *pEFlags));
503IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u64,(uint64_t *pu64Dst, uint64_t *pu64Reg, uint32_t *pEFlags));
504IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u8_locked, (uint8_t *pu8Dst, uint8_t *pu8Reg, uint32_t *pEFlags));
505IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u16_locked,(uint16_t *pu16Dst, uint16_t *pu16Reg, uint32_t *pEFlags));
506IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u32_locked,(uint32_t *pu32Dst, uint32_t *pu32Reg, uint32_t *pEFlags));
507IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u64_locked,(uint64_t *pu64Dst, uint64_t *pu64Reg, uint32_t *pEFlags));
508/** @} */
509
510/** @name Compare and exchange.
511 * @{ */
512IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u8, (uint8_t *pu8Dst, uint8_t *puAl, uint8_t uSrcReg, uint32_t *pEFlags));
513IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u8_locked, (uint8_t *pu8Dst, uint8_t *puAl, uint8_t uSrcReg, uint32_t *pEFlags));
514IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u16, (uint16_t *pu16Dst, uint16_t *puAx, uint16_t uSrcReg, uint32_t *pEFlags));
515IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u16_locked,(uint16_t *pu16Dst, uint16_t *puAx, uint16_t uSrcReg, uint32_t *pEFlags));
516IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u32, (uint32_t *pu32Dst, uint32_t *puEax, uint32_t uSrcReg, uint32_t *pEFlags));
517IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u32_locked,(uint32_t *pu32Dst, uint32_t *puEax, uint32_t uSrcReg, uint32_t *pEFlags));
518#if ARCH_BITS == 32
519IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64, (uint64_t *pu64Dst, uint64_t *puRax, uint64_t *puSrcReg, uint32_t *pEFlags));
520IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64_locked,(uint64_t *pu64Dst, uint64_t *puRax, uint64_t *puSrcReg, uint32_t *pEFlags));
521#else
522IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64, (uint64_t *pu64Dst, uint64_t *puRax, uint64_t uSrcReg, uint32_t *pEFlags));
523IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64_locked,(uint64_t *pu64Dst, uint64_t *puRax, uint64_t uSrcReg, uint32_t *pEFlags));
524#endif
525IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg8b,(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx,
526 uint32_t *pEFlags));
527IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg8b_locked,(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx,
528 uint32_t *pEFlags));
529IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx, PRTUINT128U pu128RbxRcx,
530 uint32_t *pEFlags));
531IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b_locked,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx, PRTUINT128U pu128RbxRcx,
532 uint32_t *pEFlags));
533#ifndef RT_ARCH_ARM64
534IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b_fallback,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx,
535 PRTUINT128U pu128RbxRcx, uint32_t *pEFlags));
536#endif
537/** @} */
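/*
 * A sketch of the CMPXCHG semantics behind the declarations above, reusing the CMP
 * worker for the flag computation (hypothetical helper; the _locked variants perform
 * the same operation atomically):
 */
static void iemExampleCmpXchgU8(uint8_t *pu8Dst, uint8_t *puAl, uint8_t uSrcReg, uint32_t *pEFlags)
{
    uint8_t const uOld = *pu8Dst;
    *pEFlags = iemAImpl_cmp_u8(*pEFlags, &uOld, *puAl); /* arithmetic flags as for CMP dst, al */
    if (uOld == *puAl)
        *pu8Dst = uSrcReg;                              /* equal: ZF=1, the source register is stored */
    else
        *puAl = uOld;                                   /* not equal: ZF=0, old destination lands in AL */
}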
538
539/** @name Memory ordering
540 * @{ */
541typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEMFENCE,(void));
542typedef FNIEMAIMPLMEMFENCE *PFNIEMAIMPLMEMFENCE;
543IEM_DECL_IMPL_DEF(void, iemAImpl_mfence,(void));
544IEM_DECL_IMPL_DEF(void, iemAImpl_sfence,(void));
545IEM_DECL_IMPL_DEF(void, iemAImpl_lfence,(void));
546#ifndef RT_ARCH_ARM64
547IEM_DECL_IMPL_DEF(void, iemAImpl_alt_mem_fence,(void));
548#endif
549/** @} */
550
551/** @name Double precision shifts
552 * @{ */
553typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU16,(uint16_t *pu16Dst, uint16_t u16Src, uint8_t cShift, uint32_t *pEFlags));
554typedef FNIEMAIMPLSHIFTDBLU16 *PFNIEMAIMPLSHIFTDBLU16;
555typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU32,(uint32_t *pu32Dst, uint32_t u32Src, uint8_t cShift, uint32_t *pEFlags));
556typedef FNIEMAIMPLSHIFTDBLU32 *PFNIEMAIMPLSHIFTDBLU32;
557typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU64,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t cShift, uint32_t *pEFlags));
558typedef FNIEMAIMPLSHIFTDBLU64 *PFNIEMAIMPLSHIFTDBLU64;
559FNIEMAIMPLSHIFTDBLU16 iemAImpl_shld_u16, iemAImpl_shld_u16_amd, iemAImpl_shld_u16_intel;
560FNIEMAIMPLSHIFTDBLU32 iemAImpl_shld_u32, iemAImpl_shld_u32_amd, iemAImpl_shld_u32_intel;
561FNIEMAIMPLSHIFTDBLU64 iemAImpl_shld_u64, iemAImpl_shld_u64_amd, iemAImpl_shld_u64_intel;
562FNIEMAIMPLSHIFTDBLU16 iemAImpl_shrd_u16, iemAImpl_shrd_u16_amd, iemAImpl_shrd_u16_intel;
563FNIEMAIMPLSHIFTDBLU32 iemAImpl_shrd_u32, iemAImpl_shrd_u32_amd, iemAImpl_shrd_u32_intel;
564FNIEMAIMPLSHIFTDBLU64 iemAImpl_shrd_u64, iemAImpl_shrd_u64_amd, iemAImpl_shrd_u64_intel;
565/** @} */
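/*
 * A sketch of the double precision shift semantics for the 32-bit SHLD case: bits
 * shifted out of the destination are replaced by the top bits of the source operand.
 * OF and AF are left out of this sketch; their behaviour for counts other than 1 is
 * undefined, which is one reason the _amd/_intel variants exist (hypothetical
 * implementation):
 */
static void iemExampleShldU32(uint32_t *pu32Dst, uint32_t u32Src, uint8_t cShift, uint32_t *pEFlags)
{
    cShift &= 31;                                       /* the count is masked to 0..31 */
    if (cShift)
    {
        uint32_t const uOld    = *pu32Dst;
        uint32_t const uResult = (uOld << cShift) | (u32Src >> (32 - cShift));
        *pu32Dst = uResult;

        uint32_t fEFlags = *pEFlags & ~UINT32_C(0x00c5);    /* clear SF, ZF, PF, CF */
        if ((uOld >> (32 - cShift)) & 1)
            fEFlags |= UINT32_C(0x0001);                    /* CF: last bit shifted out */
        if (!uResult)
            fEFlags |= UINT32_C(0x0040);                    /* ZF */
        if (uResult & UINT32_C(0x80000000))
            fEFlags |= UINT32_C(0x0080);                    /* SF */
        fEFlags |= g_afParity[uResult & 0xff];              /* PF from the low result byte */
        *pEFlags = fEFlags;
    }
    /* A count of zero leaves the destination and EFLAGS unchanged. */
}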
566
567
568/** @name Bit search operations (thrown in with the binary ops).
569 * @{ */
570FNIEMAIMPLBINU16 iemAImpl_bsf_u16, iemAImpl_bsf_u16_amd, iemAImpl_bsf_u16_intel;
571FNIEMAIMPLBINU32 iemAImpl_bsf_u32, iemAImpl_bsf_u32_amd, iemAImpl_bsf_u32_intel;
572FNIEMAIMPLBINU64 iemAImpl_bsf_u64, iemAImpl_bsf_u64_amd, iemAImpl_bsf_u64_intel;
573FNIEMAIMPLBINU16 iemAImpl_bsr_u16, iemAImpl_bsr_u16_amd, iemAImpl_bsr_u16_intel;
574FNIEMAIMPLBINU32 iemAImpl_bsr_u32, iemAImpl_bsr_u32_amd, iemAImpl_bsr_u32_intel;
575FNIEMAIMPLBINU64 iemAImpl_bsr_u64, iemAImpl_bsr_u64_amd, iemAImpl_bsr_u64_intel;
576FNIEMAIMPLBINU16 iemAImpl_lzcnt_u16, iemAImpl_lzcnt_u16_amd, iemAImpl_lzcnt_u16_intel;
577FNIEMAIMPLBINU32 iemAImpl_lzcnt_u32, iemAImpl_lzcnt_u32_amd, iemAImpl_lzcnt_u32_intel;
578FNIEMAIMPLBINU64 iemAImpl_lzcnt_u64, iemAImpl_lzcnt_u64_amd, iemAImpl_lzcnt_u64_intel;
579FNIEMAIMPLBINU16 iemAImpl_tzcnt_u16, iemAImpl_tzcnt_u16_amd, iemAImpl_tzcnt_u16_intel;
580FNIEMAIMPLBINU32 iemAImpl_tzcnt_u32, iemAImpl_tzcnt_u32_amd, iemAImpl_tzcnt_u32_intel;
581FNIEMAIMPLBINU64 iemAImpl_tzcnt_u64, iemAImpl_tzcnt_u64_amd, iemAImpl_tzcnt_u64_intel;
582FNIEMAIMPLBINU16 iemAImpl_popcnt_u16, iemAImpl_popcnt_u16_fallback;
583FNIEMAIMPLBINU32 iemAImpl_popcnt_u32, iemAImpl_popcnt_u32_fallback;
584FNIEMAIMPLBINU64 iemAImpl_popcnt_u64, iemAImpl_popcnt_u64_fallback;
585/** @} */
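/*
 * BSF/BSR and LZCNT/TZCNT share the binary-op worker shape but differ for a zero
 * source: BSF/BSR set ZF and leave the destination formally undefined (one reason the
 * _amd/_intel variants exist), while TZCNT/LZCNT store the operand width and set CF.
 * A sketch of the TZCNT side for 16-bit operands (hypothetical implementation):
 */
static uint32_t iemExampleTzCntU16(uint32_t fEFlagsIn, uint16_t *pu16Dst, uint16_t u16Src)
{
    uint32_t fEFlags = fEFlagsIn & ~UINT32_C(0x0041);   /* clear CF and ZF */
    if (!u16Src)
    {
        *pu16Dst  = 16;                                 /* zero source: result is the operand size in bits */
        fEFlags  |= UINT32_C(0x0001);                   /* CF=1 flags the zero-source case */
    }
    else
    {
        uint16_t cZeros = 0;
        while (!(u16Src & (1u << cZeros)))              /* count trailing zero bits */
            cZeros++;
        *pu16Dst = cZeros;
        if (!cZeros)
            fEFlags |= UINT32_C(0x0040);                /* ZF=1 when the result is zero */
    }
    return fEFlags;
}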
586
587/** @name Signed multiplication operations (thrown in with the binary ops).
588 * @{ */
589FNIEMAIMPLBINU16 iemAImpl_imul_two_u16, iemAImpl_imul_two_u16_amd, iemAImpl_imul_two_u16_intel;
590FNIEMAIMPLBINU32 iemAImpl_imul_two_u32, iemAImpl_imul_two_u32_amd, iemAImpl_imul_two_u32_intel;
591FNIEMAIMPLBINU64 iemAImpl_imul_two_u64, iemAImpl_imul_two_u64_amd, iemAImpl_imul_two_u64_intel;
592/** @} */
593
594/** @name Arithmetic assignment operations on bytes (unary).
595 * @{ */
596typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU8, (uint8_t *pu8Dst, uint32_t *pEFlags));
597typedef FNIEMAIMPLUNARYU8 *PFNIEMAIMPLUNARYU8;
598FNIEMAIMPLUNARYU8 iemAImpl_inc_u8, iemAImpl_inc_u8_locked;
599FNIEMAIMPLUNARYU8 iemAImpl_dec_u8, iemAImpl_dec_u8_locked;
600FNIEMAIMPLUNARYU8 iemAImpl_not_u8, iemAImpl_not_u8_locked;
601FNIEMAIMPLUNARYU8 iemAImpl_neg_u8, iemAImpl_neg_u8_locked;
602/** @} */
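/*
 * INC and DEC update OF, SF, ZF, AF and PF but leave CF alone, which is one reason
 * they get a separate unary worker type instead of reusing the binary add/sub workers.
 * A sketch of the byte INC case (hypothetical implementation):
 */
static void iemExampleIncU8(uint8_t *pu8Dst, uint32_t *pEFlags)
{
    uint8_t const uOld    = *pu8Dst;
    uint8_t const uResult = (uint8_t)(uOld + 1);
    *pu8Dst = uResult;

    uint32_t fEFlags = *pEFlags & ~UINT32_C(0x08d4);    /* clear OF, SF, ZF, AF, PF; keep CF */
    if (uResult == 0x80)
        fEFlags |= UINT32_C(0x0800);                    /* OF: 0x7f + 1 overflowed */
    if ((uOld ^ uResult) & 0x10)
        fEFlags |= UINT32_C(0x0010);                    /* AF: carry out of bit 3 */
    if (!uResult)
        fEFlags |= UINT32_C(0x0040);                    /* ZF */
    if (uResult & 0x80)
        fEFlags |= UINT32_C(0x0080);                    /* SF */
    fEFlags |= g_afParity[uResult];                     /* PF */
    *pEFlags = fEFlags;
}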
603
604/** @name Arithmetic assignment operations on words (unary).
605 * @{ */
606typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU16, (uint16_t *pu16Dst, uint32_t *pEFlags));
607typedef FNIEMAIMPLUNARYU16 *PFNIEMAIMPLUNARYU16;
608FNIEMAIMPLUNARYU16 iemAImpl_inc_u16, iemAImpl_inc_u16_locked;
609FNIEMAIMPLUNARYU16 iemAImpl_dec_u16, iemAImpl_dec_u16_locked;
610FNIEMAIMPLUNARYU16 iemAImpl_not_u16, iemAImpl_not_u16_locked;
611FNIEMAIMPLUNARYU16 iemAImpl_neg_u16, iemAImpl_neg_u16_locked;
612/** @} */
613
614/** @name Arithmetic assignment operations on double words (unary).
615 * @{ */
616typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU32, (uint32_t *pu32Dst, uint32_t *pEFlags));
617typedef FNIEMAIMPLUNARYU32 *PFNIEMAIMPLUNARYU32;
618FNIEMAIMPLUNARYU32 iemAImpl_inc_u32, iemAImpl_inc_u32_locked;
619FNIEMAIMPLUNARYU32 iemAImpl_dec_u32, iemAImpl_dec_u32_locked;
620FNIEMAIMPLUNARYU32 iemAImpl_not_u32, iemAImpl_not_u32_locked;
621FNIEMAIMPLUNARYU32 iemAImpl_neg_u32, iemAImpl_neg_u32_locked;
622/** @} */
623
624/** @name Arithmetic assignment operations on quad words (unary).
625 * @{ */
626typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU64, (uint64_t *pu64Dst, uint32_t *pEFlags));
627typedef FNIEMAIMPLUNARYU64 *PFNIEMAIMPLUNARYU64;
628FNIEMAIMPLUNARYU64 iemAImpl_inc_u64, iemAImpl_inc_u64_locked;
629FNIEMAIMPLUNARYU64 iemAImpl_dec_u64, iemAImpl_dec_u64_locked;
630FNIEMAIMPLUNARYU64 iemAImpl_not_u64, iemAImpl_not_u64_locked;
631FNIEMAIMPLUNARYU64 iemAImpl_neg_u64, iemAImpl_neg_u64_locked;
632/** @} */
633
634
635/** @name Shift operations on bytes (Group 2).
636 * @{ */
637typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU8,(uint32_t fEFlagsIn, uint8_t *pu8Dst, uint8_t cShift));
638typedef FNIEMAIMPLSHIFTU8 *PFNIEMAIMPLSHIFTU8;
639FNIEMAIMPLSHIFTU8 iemAImpl_rol_u8, iemAImpl_rol_u8_amd, iemAImpl_rol_u8_intel;
640FNIEMAIMPLSHIFTU8 iemAImpl_ror_u8, iemAImpl_ror_u8_amd, iemAImpl_ror_u8_intel;
641FNIEMAIMPLSHIFTU8 iemAImpl_rcl_u8, iemAImpl_rcl_u8_amd, iemAImpl_rcl_u8_intel;
642FNIEMAIMPLSHIFTU8 iemAImpl_rcr_u8, iemAImpl_rcr_u8_amd, iemAImpl_rcr_u8_intel;
643FNIEMAIMPLSHIFTU8 iemAImpl_shl_u8, iemAImpl_shl_u8_amd, iemAImpl_shl_u8_intel;
644FNIEMAIMPLSHIFTU8 iemAImpl_shr_u8, iemAImpl_shr_u8_amd, iemAImpl_shr_u8_intel;
645FNIEMAIMPLSHIFTU8 iemAImpl_sar_u8, iemAImpl_sar_u8_amd, iemAImpl_sar_u8_intel;
646/** @} */
647
648/** @name Shift operations on words (Group 2).
649 * @{ */
650typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU16,(uint32_t fEFlagsIn, uint16_t *pu16Dst, uint8_t cShift));
651typedef FNIEMAIMPLSHIFTU16 *PFNIEMAIMPLSHIFTU16;
652FNIEMAIMPLSHIFTU16 iemAImpl_rol_u16, iemAImpl_rol_u16_amd, iemAImpl_rol_u16_intel;
653FNIEMAIMPLSHIFTU16 iemAImpl_ror_u16, iemAImpl_ror_u16_amd, iemAImpl_ror_u16_intel;
654FNIEMAIMPLSHIFTU16 iemAImpl_rcl_u16, iemAImpl_rcl_u16_amd, iemAImpl_rcl_u16_intel;
655FNIEMAIMPLSHIFTU16 iemAImpl_rcr_u16, iemAImpl_rcr_u16_amd, iemAImpl_rcr_u16_intel;
656FNIEMAIMPLSHIFTU16 iemAImpl_shl_u16, iemAImpl_shl_u16_amd, iemAImpl_shl_u16_intel;
657FNIEMAIMPLSHIFTU16 iemAImpl_shr_u16, iemAImpl_shr_u16_amd, iemAImpl_shr_u16_intel;
658FNIEMAIMPLSHIFTU16 iemAImpl_sar_u16, iemAImpl_sar_u16_amd, iemAImpl_sar_u16_intel;
659/** @} */
660
661/** @name Shift operations on double words (Group 2).
662 * @{ */
663typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU32,(uint32_t fEFlagsIn, uint32_t *pu32Dst, uint8_t cShift));
664typedef FNIEMAIMPLSHIFTU32 *PFNIEMAIMPLSHIFTU32;
665FNIEMAIMPLSHIFTU32 iemAImpl_rol_u32, iemAImpl_rol_u32_amd, iemAImpl_rol_u32_intel;
666FNIEMAIMPLSHIFTU32 iemAImpl_ror_u32, iemAImpl_ror_u32_amd, iemAImpl_ror_u32_intel;
667FNIEMAIMPLSHIFTU32 iemAImpl_rcl_u32, iemAImpl_rcl_u32_amd, iemAImpl_rcl_u32_intel;
668FNIEMAIMPLSHIFTU32 iemAImpl_rcr_u32, iemAImpl_rcr_u32_amd, iemAImpl_rcr_u32_intel;
669FNIEMAIMPLSHIFTU32 iemAImpl_shl_u32, iemAImpl_shl_u32_amd, iemAImpl_shl_u32_intel;
670FNIEMAIMPLSHIFTU32 iemAImpl_shr_u32, iemAImpl_shr_u32_amd, iemAImpl_shr_u32_intel;
671FNIEMAIMPLSHIFTU32 iemAImpl_sar_u32, iemAImpl_sar_u32_amd, iemAImpl_sar_u32_intel;
672/** @} */
673
674/** @name Shift operations on quad words (Group 2).
675 * @{ */
676typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU64,(uint32_t fEFlagsIn, uint64_t *pu64Dst, uint8_t cShift));
677typedef FNIEMAIMPLSHIFTU64 *PFNIEMAIMPLSHIFTU64;
678FNIEMAIMPLSHIFTU64 iemAImpl_rol_u64, iemAImpl_rol_u64_amd, iemAImpl_rol_u64_intel;
679FNIEMAIMPLSHIFTU64 iemAImpl_ror_u64, iemAImpl_ror_u64_amd, iemAImpl_ror_u64_intel;
680FNIEMAIMPLSHIFTU64 iemAImpl_rcl_u64, iemAImpl_rcl_u64_amd, iemAImpl_rcl_u64_intel;
681FNIEMAIMPLSHIFTU64 iemAImpl_rcr_u64, iemAImpl_rcr_u64_amd, iemAImpl_rcr_u64_intel;
682FNIEMAIMPLSHIFTU64 iemAImpl_shl_u64, iemAImpl_shl_u64_amd, iemAImpl_shl_u64_intel;
683FNIEMAIMPLSHIFTU64 iemAImpl_shr_u64, iemAImpl_shr_u64_amd, iemAImpl_shr_u64_intel;
684FNIEMAIMPLSHIFTU64 iemAImpl_sar_u64, iemAImpl_sar_u64_amd, iemAImpl_sar_u64_intel;
685/** @} */
686
687/** @name Multiplication and division operations.
688 * @{ */
689typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU8,(uint16_t *pu16AX, uint8_t u8FactorDivisor, uint32_t fEFlags));
690typedef FNIEMAIMPLMULDIVU8 *PFNIEMAIMPLMULDIVU8;
691FNIEMAIMPLMULDIVU8 iemAImpl_mul_u8, iemAImpl_mul_u8_amd, iemAImpl_mul_u8_intel;
692FNIEMAIMPLMULDIVU8 iemAImpl_imul_u8, iemAImpl_imul_u8_amd, iemAImpl_imul_u8_intel;
693FNIEMAIMPLMULDIVU8 iemAImpl_div_u8, iemAImpl_div_u8_amd, iemAImpl_div_u8_intel;
694FNIEMAIMPLMULDIVU8 iemAImpl_idiv_u8, iemAImpl_idiv_u8_amd, iemAImpl_idiv_u8_intel;
695
696typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU16,(uint16_t *pu16AX, uint16_t *pu16DX, uint16_t u16FactorDivisor, uint32_t fEFlags));
697typedef FNIEMAIMPLMULDIVU16 *PFNIEMAIMPLMULDIVU16;
698FNIEMAIMPLMULDIVU16 iemAImpl_mul_u16, iemAImpl_mul_u16_amd, iemAImpl_mul_u16_intel;
699FNIEMAIMPLMULDIVU16 iemAImpl_imul_u16, iemAImpl_imul_u16_amd, iemAImpl_imul_u16_intel;
700FNIEMAIMPLMULDIVU16 iemAImpl_div_u16, iemAImpl_div_u16_amd, iemAImpl_div_u16_intel;
701FNIEMAIMPLMULDIVU16 iemAImpl_idiv_u16, iemAImpl_idiv_u16_amd, iemAImpl_idiv_u16_intel;
702
703typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU32,(uint32_t *pu32EAX, uint32_t *pu32EDX, uint32_t u32FactorDivisor, uint32_t fEFlags));
704typedef FNIEMAIMPLMULDIVU32 *PFNIEMAIMPLMULDIVU32;
705FNIEMAIMPLMULDIVU32 iemAImpl_mul_u32, iemAImpl_mul_u32_amd, iemAImpl_mul_u32_intel;
706FNIEMAIMPLMULDIVU32 iemAImpl_imul_u32, iemAImpl_imul_u32_amd, iemAImpl_imul_u32_intel;
707FNIEMAIMPLMULDIVU32 iemAImpl_div_u32, iemAImpl_div_u32_amd, iemAImpl_div_u32_intel;
708FNIEMAIMPLMULDIVU32 iemAImpl_idiv_u32, iemAImpl_idiv_u32_amd, iemAImpl_idiv_u32_intel;
709
710typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64FactorDivisor, uint32_t fEFlags));
711typedef FNIEMAIMPLMULDIVU64 *PFNIEMAIMPLMULDIVU64;
712FNIEMAIMPLMULDIVU64 iemAImpl_mul_u64, iemAImpl_mul_u64_amd, iemAImpl_mul_u64_intel;
713FNIEMAIMPLMULDIVU64 iemAImpl_imul_u64, iemAImpl_imul_u64_amd, iemAImpl_imul_u64_intel;
714FNIEMAIMPLMULDIVU64 iemAImpl_div_u64, iemAImpl_div_u64_amd, iemAImpl_div_u64_intel;
715FNIEMAIMPLMULDIVU64 iemAImpl_idiv_u64, iemAImpl_idiv_u64_amd, iemAImpl_idiv_u64_intel;
716/** @} */
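/*
 * A data-flow sketch only: the 16-bit DIV divides the 32-bit value DX:AX by the
 * divisor, leaving the quotient in AX and the remainder in DX, and leaves the
 * arithmetic flags undefined.  How the real workers report #DE (divide by zero or
 * quotient overflow) through their return value is not shown here; this hypothetical
 * helper assumes the caller guarantees a non-zero divisor:
 */
static uint32_t iemExampleDivU16(uint16_t *pu16AX, uint16_t *pu16DX, uint16_t u16Divisor, uint32_t fEFlags)
{
    uint32_t const uDividend = ((uint32_t)*pu16DX << 16) | *pu16AX;
    uint32_t const uQuotient = uDividend / u16Divisor;
    if (uQuotient > 0xffff)
        return fEFlags;                             /* quotient does not fit in AX: real code raises #DE */
    *pu16AX = (uint16_t)uQuotient;
    *pu16DX = (uint16_t)(uDividend % u16Divisor);
    return fEFlags;
}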
717
718/** @name Byte Swap.
719 * @{ */
720IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u16,(uint32_t *pu32Dst)); /* Yes, 32-bit register access. */
721IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u32,(uint32_t *pu32Dst));
722IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u64,(uint64_t *pu64Dst));
723/** @} */
724
725/** @name Misc.
726 * @{ */
727FNIEMAIMPLBINU16 iemAImpl_arpl;
728/** @} */
729
730/** @name RDRAND and RDSEED
731 * @{ */
732typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU16,(uint16_t *puDst, uint32_t *pEFlags));
733typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU32,(uint32_t *puDst, uint32_t *pEFlags));
734typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU64,(uint64_t *puDst, uint32_t *pEFlags));
735typedef FNIEMAIMPLRDRANDSEEDU16 *PFNIEMAIMPLRDRANDSEEDU16;
736typedef FNIEMAIMPLRDRANDSEEDU32 *PFNIEMAIMPLRDRANDSEEDU32;
737typedef FNIEMAIMPLRDRANDSEEDU64 *PFNIEMAIMPLRDRANDSEEDU64;
738
739FNIEMAIMPLRDRANDSEEDU16 iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback;
740FNIEMAIMPLRDRANDSEEDU32 iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback;
741FNIEMAIMPLRDRANDSEEDU64 iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback;
742FNIEMAIMPLRDRANDSEEDU16 iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback;
743FNIEMAIMPLRDRANDSEEDU32 iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback;
744FNIEMAIMPLRDRANDSEEDU64 iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback;
745/** @} */
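/*
 * RDRAND/RDSEED report success through CF: CF=1 means the destination holds a valid
 * value, CF=0 means the caller should retry; OF, SF, ZF, AF and PF are cleared.  A
 * sketch of the 16-bit case with the entropy source stubbed out (fGotEntropy and
 * uRandomValue are assumptions, not part of this header):
 */
static void iemExampleRdRandU16(uint16_t *puDst, uint32_t *pEFlags)
{
    int      const fGotEntropy  = 1;            /* assumed: the hardware had entropy available */
    uint16_t const uRandomValue = 0x1234;       /* assumed: value delivered by the random source */

    *pEFlags &= ~UINT32_C(0x08d5);              /* clear OF, SF, ZF, AF, PF and CF */
    if (fGotEntropy)
    {
        *puDst    = uRandomValue;
        *pEFlags |= UINT32_C(0x0001);           /* CF=1: valid data returned */
    }
    else
        *puDst = 0;                             /* CF stays 0: caller should retry */
}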
746
747/** @name ADOX and ADCX
748 * @{ */
749FNIEMAIMPLBINU32 iemAImpl_adcx_u32, iemAImpl_adcx_u32_fallback;
750FNIEMAIMPLBINU64 iemAImpl_adcx_u64, iemAImpl_adcx_u64_fallback;
751FNIEMAIMPLBINU32 iemAImpl_adox_u32, iemAImpl_adox_u32_fallback;
752FNIEMAIMPLBINU64 iemAImpl_adox_u64, iemAImpl_adox_u64_fallback;
753/** @} */
754
755
756/**
757 * A FPU result.
758 * @note x86 specific
759 */
760typedef struct IEMFPURESULT
761{
762 /** The output value. */
763 RTFLOAT80U r80Result;
764 /** The output status. */
765 uint16_t FSW;
766} IEMFPURESULT;
767AssertCompileMemberOffset(IEMFPURESULT, FSW, 10);
768/** Pointer to a FPU result. */
769typedef IEMFPURESULT *PIEMFPURESULT;
770/** Pointer to a const FPU result. */
771typedef IEMFPURESULT const *PCIEMFPURESULT;
772
773/** @name FPU operations taking a 32-bit float argument
774 * @{ */
775typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR32FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
776 PCRTFLOAT80U pr80Val1, PCRTFLOAT32U pr32Val2));
777typedef FNIEMAIMPLFPUR32FSW *PFNIEMAIMPLFPUR32FSW;
778
779typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
780 PCRTFLOAT80U pr80Val1, PCRTFLOAT32U pr32Val2));
781typedef FNIEMAIMPLFPUR32 *PFNIEMAIMPLFPUR32;
782
783FNIEMAIMPLFPUR32FSW iemAImpl_fcom_r80_by_r32;
784FNIEMAIMPLFPUR32 iemAImpl_fadd_r80_by_r32;
785FNIEMAIMPLFPUR32 iemAImpl_fmul_r80_by_r32;
786FNIEMAIMPLFPUR32 iemAImpl_fsub_r80_by_r32;
787FNIEMAIMPLFPUR32 iemAImpl_fsubr_r80_by_r32;
788FNIEMAIMPLFPUR32 iemAImpl_fdiv_r80_by_r32;
789FNIEMAIMPLFPUR32 iemAImpl_fdivr_r80_by_r32;
790
791IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT32U pr32Val));
792IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r32,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
793 PRTFLOAT32U pr32Val, PCRTFLOAT80U pr80Val));
794/** @} */
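/*
 * A sketch of how these FPU worker types are meant to be used: pick a worker, call it
 * with the FXSAVE state and the operands, then consume the IEMFPURESULT.  The
 * surrounding bookkeeping (pushing r80Result onto the register stack and merging FSW
 * back into the guest state) is outside this sketch, and the helper itself is
 * hypothetical:
 */
static void iemExampleDoFAddR32(PCX86FXSTATE pFpuCtx, PCRTFLOAT80U pr80St0, PCRTFLOAT32U pr32Mem)
{
    PFNIEMAIMPLFPUR32 pfnWorker = iemAImpl_fadd_r80_by_r32; /* or any other FNIEMAIMPLFPUR32 worker */
    IEMFPURESULT      FpuRes;
    pfnWorker(pFpuCtx, &FpuRes, pr80St0, pr32Mem);
    /* FpuRes.r80Result would replace ST(0) and FpuRes.FSW the guest FSW here. */
    (void)FpuRes;
}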
795
796/** @name FPU operations taking a 64-bit float argument
797 * @{ */
798typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR64FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
799 PCRTFLOAT80U pr80Val1, PCRTFLOAT64U pr64Val2));
800typedef FNIEMAIMPLFPUR64FSW *PFNIEMAIMPLFPUR64FSW;
801
802typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
803 PCRTFLOAT80U pr80Val1, PCRTFLOAT64U pr64Val2));
804typedef FNIEMAIMPLFPUR64 *PFNIEMAIMPLFPUR64;
805
806FNIEMAIMPLFPUR64FSW iemAImpl_fcom_r80_by_r64;
807FNIEMAIMPLFPUR64 iemAImpl_fadd_r80_by_r64;
808FNIEMAIMPLFPUR64 iemAImpl_fmul_r80_by_r64;
809FNIEMAIMPLFPUR64 iemAImpl_fsub_r80_by_r64;
810FNIEMAIMPLFPUR64 iemAImpl_fsubr_r80_by_r64;
811FNIEMAIMPLFPUR64 iemAImpl_fdiv_r80_by_r64;
812FNIEMAIMPLFPUR64 iemAImpl_fdivr_r80_by_r64;
813
814IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT64U pr64Val));
815IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r64,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
816 * PRTFLOAT64U pr64Val, PCRTFLOAT80U pr80Val));
817/** @} */
818
819/** @name FPU operations taking a 80-bit float argument
820 * @{ */
821typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
822 PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
823typedef FNIEMAIMPLFPUR80 *PFNIEMAIMPLFPUR80;
824FNIEMAIMPLFPUR80 iemAImpl_fadd_r80_by_r80;
825FNIEMAIMPLFPUR80 iemAImpl_fmul_r80_by_r80;
826FNIEMAIMPLFPUR80 iemAImpl_fsub_r80_by_r80;
827FNIEMAIMPLFPUR80 iemAImpl_fsubr_r80_by_r80;
828FNIEMAIMPLFPUR80 iemAImpl_fdiv_r80_by_r80;
829FNIEMAIMPLFPUR80 iemAImpl_fdivr_r80_by_r80;
830FNIEMAIMPLFPUR80 iemAImpl_fprem_r80_by_r80;
831FNIEMAIMPLFPUR80 iemAImpl_fprem1_r80_by_r80;
832FNIEMAIMPLFPUR80 iemAImpl_fscale_r80_by_r80;
833
834FNIEMAIMPLFPUR80 iemAImpl_fpatan_r80_by_r80, iemAImpl_fpatan_r80_by_r80_amd, iemAImpl_fpatan_r80_by_r80_intel;
835FNIEMAIMPLFPUR80 iemAImpl_fyl2x_r80_by_r80, iemAImpl_fyl2x_r80_by_r80_amd, iemAImpl_fyl2x_r80_by_r80_intel;
836FNIEMAIMPLFPUR80 iemAImpl_fyl2xp1_r80_by_r80, iemAImpl_fyl2xp1_r80_by_r80_amd, iemAImpl_fyl2xp1_r80_by_r80_intel;
837
838typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
839 PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
840typedef FNIEMAIMPLFPUR80FSW *PFNIEMAIMPLFPUR80FSW;
841FNIEMAIMPLFPUR80FSW iemAImpl_fcom_r80_by_r80;
842FNIEMAIMPLFPUR80FSW iemAImpl_fucom_r80_by_r80;
843
844typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPUR80EFL,(PCX86FXSTATE pFpuState, uint16_t *pu16Fsw,
845 PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
846typedef FNIEMAIMPLFPUR80EFL *PFNIEMAIMPLFPUR80EFL;
847FNIEMAIMPLFPUR80EFL iemAImpl_fcomi_r80_by_r80;
848FNIEMAIMPLFPUR80EFL iemAImpl_fucomi_r80_by_r80;
849
850typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARY,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT80U pr80Val));
851typedef FNIEMAIMPLFPUR80UNARY *PFNIEMAIMPLFPUR80UNARY;
852FNIEMAIMPLFPUR80UNARY iemAImpl_fabs_r80;
853FNIEMAIMPLFPUR80UNARY iemAImpl_fchs_r80;
854FNIEMAIMPLFPUR80UNARY iemAImpl_f2xm1_r80, iemAImpl_f2xm1_r80_amd, iemAImpl_f2xm1_r80_intel;
855FNIEMAIMPLFPUR80UNARY iemAImpl_fsqrt_r80;
856FNIEMAIMPLFPUR80UNARY iemAImpl_frndint_r80;
857FNIEMAIMPLFPUR80UNARY iemAImpl_fsin_r80, iemAImpl_fsin_r80_amd, iemAImpl_fsin_r80_intel;
858FNIEMAIMPLFPUR80UNARY iemAImpl_fcos_r80, iemAImpl_fcos_r80_amd, iemAImpl_fcos_r80_intel;
859
860typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARYFSW,(PCX86FXSTATE pFpuState, uint16_t *pu16Fsw, PCRTFLOAT80U pr80Val));
861typedef FNIEMAIMPLFPUR80UNARYFSW *PFNIEMAIMPLFPUR80UNARYFSW;
862FNIEMAIMPLFPUR80UNARYFSW iemAImpl_ftst_r80;
863FNIEMAIMPLFPUR80UNARYFSW iemAImpl_fxam_r80;
864
865typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80LDCONST,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes));
866typedef FNIEMAIMPLFPUR80LDCONST *PFNIEMAIMPLFPUR80LDCONST;
867FNIEMAIMPLFPUR80LDCONST iemAImpl_fld1;
868FNIEMAIMPLFPUR80LDCONST iemAImpl_fldl2t;
869FNIEMAIMPLFPUR80LDCONST iemAImpl_fldl2e;
870FNIEMAIMPLFPUR80LDCONST iemAImpl_fldpi;
871FNIEMAIMPLFPUR80LDCONST iemAImpl_fldlg2;
872FNIEMAIMPLFPUR80LDCONST iemAImpl_fldln2;
873FNIEMAIMPLFPUR80LDCONST iemAImpl_fldz;
874
875/**
876 * A FPU result consisting of two output values and FSW.
877 * @note x86 specific
878 */
879typedef struct IEMFPURESULTTWO
880{
881 /** The first output value. */
882 RTFLOAT80U r80Result1;
883 /** The output status. */
884 uint16_t FSW;
885 /** The second output value. */
886 RTFLOAT80U r80Result2;
887} IEMFPURESULTTWO;
888AssertCompileMemberOffset(IEMFPURESULTTWO, FSW, 10);
889AssertCompileMemberOffset(IEMFPURESULTTWO, r80Result2, 12);
890/** Pointer to a FPU result consisting of two output values and FSW. */
891typedef IEMFPURESULTTWO *PIEMFPURESULTTWO;
892/** Pointer to a const FPU result consisting of two output values and FSW. */
893typedef IEMFPURESULTTWO const *PCIEMFPURESULTTWO;
894
895typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARYTWO,(PCX86FXSTATE pFpuState, PIEMFPURESULTTWO pFpuResTwo,
896 PCRTFLOAT80U pr80Val));
897typedef FNIEMAIMPLFPUR80UNARYTWO *PFNIEMAIMPLFPUR80UNARYTWO;
898FNIEMAIMPLFPUR80UNARYTWO iemAImpl_fptan_r80_r80, iemAImpl_fptan_r80_r80_amd, iemAImpl_fptan_r80_r80_intel;
899FNIEMAIMPLFPUR80UNARYTWO iemAImpl_fxtract_r80_r80;
900FNIEMAIMPLFPUR80UNARYTWO iemAImpl_fsincos_r80_r80, iemAImpl_fsincos_r80_r80_amd, iemAImpl_fsincos_r80_r80_intel;
901
902IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT80U pr80Val));
903IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r80,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
904 PRTFLOAT80U pr80Dst, PCRTFLOAT80U pr80Src));
905
906IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_d80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTPBCD80U pd80Val));
907IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_d80,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
908 PRTPBCD80U pd80Dst, PCRTFLOAT80U pr80Src));
909
910/** @} */
911
912/** @name FPU operations taking a 16-bit signed integer argument
913 * @{ */
914typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI16,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
915 PCRTFLOAT80U pr80Val1, int16_t const *pi16Val2));
916typedef FNIEMAIMPLFPUI16 *PFNIEMAIMPLFPUI16;
917typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI16,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
918 int16_t *pi16Dst, PCRTFLOAT80U pr80Src));
919typedef FNIEMAIMPLFPUSTR80TOI16 *PFNIEMAIMPLFPUSTR80TOI16;
920
921FNIEMAIMPLFPUI16 iemAImpl_fiadd_r80_by_i16;
922FNIEMAIMPLFPUI16 iemAImpl_fimul_r80_by_i16;
923FNIEMAIMPLFPUI16 iemAImpl_fisub_r80_by_i16;
924FNIEMAIMPLFPUI16 iemAImpl_fisubr_r80_by_i16;
925FNIEMAIMPLFPUI16 iemAImpl_fidiv_r80_by_i16;
926FNIEMAIMPLFPUI16 iemAImpl_fidivr_r80_by_i16;
927
928typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI16FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
929 PCRTFLOAT80U pr80Val1, int16_t const *pi16Val2));
930typedef FNIEMAIMPLFPUI16FSW *PFNIEMAIMPLFPUI16FSW;
931FNIEMAIMPLFPUI16FSW iemAImpl_ficom_r80_by_i16;
932
933IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i16,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int16_t const *pi16Val));
934FNIEMAIMPLFPUSTR80TOI16 iemAImpl_fist_r80_to_i16;
935FNIEMAIMPLFPUSTR80TOI16 iemAImpl_fistt_r80_to_i16, iemAImpl_fistt_r80_to_i16_amd, iemAImpl_fistt_r80_to_i16_intel;
936/** @} */
937
938/** @name FPU operations taking a 32-bit signed integer argument
939 * @{ */
940typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
941 PCRTFLOAT80U pr80Val1, int32_t const *pi32Val2));
942typedef FNIEMAIMPLFPUI32 *PFNIEMAIMPLFPUI32;
943typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI32,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
944 int32_t *pi32Dst, PCRTFLOAT80U pr80Src));
945typedef FNIEMAIMPLFPUSTR80TOI32 *PFNIEMAIMPLFPUSTR80TOI32;
946
947FNIEMAIMPLFPUI32 iemAImpl_fiadd_r80_by_i32;
948FNIEMAIMPLFPUI32 iemAImpl_fimul_r80_by_i32;
949FNIEMAIMPLFPUI32 iemAImpl_fisub_r80_by_i32;
950FNIEMAIMPLFPUI32 iemAImpl_fisubr_r80_by_i32;
951FNIEMAIMPLFPUI32 iemAImpl_fidiv_r80_by_i32;
952FNIEMAIMPLFPUI32 iemAImpl_fidivr_r80_by_i32;
953
954typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI32FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
955 PCRTFLOAT80U pr80Val1, int32_t const *pi32Val2));
956typedef FNIEMAIMPLFPUI32FSW *PFNIEMAIMPLFPUI32FSW;
957FNIEMAIMPLFPUI32FSW iemAImpl_ficom_r80_by_i32;
958
959IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int32_t const *pi32Val));
960FNIEMAIMPLFPUSTR80TOI32 iemAImpl_fist_r80_to_i32;
961FNIEMAIMPLFPUSTR80TOI32 iemAImpl_fistt_r80_to_i32;
962/** @} */
963
964/** @name FPU operations taking a 64-bit signed integer argument
965 * @{ */
966typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI64,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
967 int64_t *pi64Dst, PCRTFLOAT80U pr80Src));
968typedef FNIEMAIMPLFPUSTR80TOI64 *PFNIEMAIMPLFPUSTR80TOI64;
969
970IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int64_t const *pi64Val));
971FNIEMAIMPLFPUSTR80TOI64 iemAImpl_fist_r80_to_i64;
972FNIEMAIMPLFPUSTR80TOI64 iemAImpl_fistt_r80_to_i64;
973/** @} */
974
975
976/** Temporary type representing a 256-bit vector register. */
977typedef struct { uint64_t au64[4]; } IEMVMM256;
978/** Temporary type pointing to a 256-bit vector register. */
979typedef IEMVMM256 *PIEMVMM256;
980/** Temporary type pointing to a const 256-bit vector register. */
981typedef IEMVMM256 const *PCIEMVMM256;
982
983
984/** @name Media (SSE/MMX/AVX) operations: full1 + full2 -> full1.
985 * @{ */
986typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAF2U64,(PCX86FXSTATE pFpuState, uint64_t *puDst, uint64_t const *puSrc));
987typedef FNIEMAIMPLMEDIAF2U64 *PFNIEMAIMPLMEDIAF2U64;
988typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
989typedef FNIEMAIMPLMEDIAF2U128 *PFNIEMAIMPLMEDIAF2U128;
990typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U256,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc));
991typedef FNIEMAIMPLMEDIAF2U256 *PFNIEMAIMPLMEDIAF2U256;
992typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3U128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
993typedef FNIEMAIMPLMEDIAF3U128 *PFNIEMAIMPLMEDIAF3U128;
994typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3U256,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc1, PCX86YMMREG puSrc2));
995typedef FNIEMAIMPLMEDIAF3U256 *PFNIEMAIMPLMEDIAF3U256;
996typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U64,(uint64_t *puDst, uint64_t const *puSrc));
997typedef FNIEMAIMPLMEDIAOPTF2U64 *PFNIEMAIMPLMEDIAOPTF2U64;
998typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U128,(PRTUINT128U puDst, PCRTUINT128U puSrc));
999typedef FNIEMAIMPLMEDIAOPTF2U128 *PFNIEMAIMPLMEDIAOPTF2U128;
1000typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2));
1001typedef FNIEMAIMPLMEDIAOPTF3U128 *PFNIEMAIMPLMEDIAOPTF3U128;
1002typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2));
1003typedef FNIEMAIMPLMEDIAOPTF3U256 *PFNIEMAIMPLMEDIAOPTF3U256;
1004typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U256,(PRTUINT256U puDst, PCRTUINT256U puSrc));
1005typedef FNIEMAIMPLMEDIAOPTF2U256 *PFNIEMAIMPLMEDIAOPTF2U256;
1006FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pshufb_u64, iemAImpl_pshufb_u64_fallback;
1007FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pand_u64, iemAImpl_pandn_u64, iemAImpl_por_u64, iemAImpl_pxor_u64;
1008FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pcmpeqb_u64, iemAImpl_pcmpeqw_u64, iemAImpl_pcmpeqd_u64;
1009FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pcmpgtb_u64, iemAImpl_pcmpgtw_u64, iemAImpl_pcmpgtd_u64;
1010FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddb_u64, iemAImpl_paddsb_u64, iemAImpl_paddusb_u64;
1011FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddw_u64, iemAImpl_paddsw_u64, iemAImpl_paddusw_u64;
1012FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddd_u64;
1013FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddq_u64;
1014FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubb_u64, iemAImpl_psubsb_u64, iemAImpl_psubusb_u64;
1015FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubw_u64, iemAImpl_psubsw_u64, iemAImpl_psubusw_u64;
1016FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubd_u64;
1017FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubq_u64;
1018FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmaddwd_u64, iemAImpl_pmaddwd_u64_fallback;
1019FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmullw_u64, iemAImpl_pmulhw_u64;
1020FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pminub_u64, iemAImpl_pmaxub_u64;
1021FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pminsw_u64, iemAImpl_pmaxsw_u64;
1022FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pabsb_u64, iemAImpl_pabsb_u64_fallback;
1023FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pabsw_u64, iemAImpl_pabsw_u64_fallback;
1024FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pabsd_u64, iemAImpl_pabsd_u64_fallback;
1025FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psignb_u64, iemAImpl_psignb_u64_fallback;
1026FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psignw_u64, iemAImpl_psignw_u64_fallback;
1027FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psignd_u64, iemAImpl_psignd_u64_fallback;
1028FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phaddw_u64, iemAImpl_phaddw_u64_fallback;
1029FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phaddd_u64, iemAImpl_phaddd_u64_fallback;
1030FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phsubw_u64, iemAImpl_phsubw_u64_fallback;
1031FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phsubd_u64, iemAImpl_phsubd_u64_fallback;
1032FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phaddsw_u64, iemAImpl_phaddsw_u64_fallback;
1033FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phsubsw_u64, iemAImpl_phsubsw_u64_fallback;
1034FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmaddubsw_u64, iemAImpl_pmaddubsw_u64_fallback;
1035FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmulhrsw_u64, iemAImpl_pmulhrsw_u64_fallback;
1036FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmuludq_u64;
1037FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psllw_u64, iemAImpl_psrlw_u64, iemAImpl_psraw_u64;
1038FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pslld_u64, iemAImpl_psrld_u64, iemAImpl_psrad_u64;
1039FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psllq_u64, iemAImpl_psrlq_u64;
1040FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_packsswb_u64, iemAImpl_packuswb_u64;
1041FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_packssdw_u64;
1042FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmulhuw_u64;
1043FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pavgb_u64, iemAImpl_pavgw_u64;
1044FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psadbw_u64;
1045
1046FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback;
1047FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pand_u128, iemAImpl_pandn_u128, iemAImpl_por_u128, iemAImpl_pxor_u128;
1048FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpeqb_u128, iemAImpl_pcmpeqw_u128, iemAImpl_pcmpeqd_u128;
1049FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback;
1050FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpgtb_u128, iemAImpl_pcmpgtw_u128, iemAImpl_pcmpgtd_u128;
1051FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback;
1052FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddb_u128, iemAImpl_paddsb_u128, iemAImpl_paddusb_u128;
1053FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddw_u128, iemAImpl_paddsw_u128, iemAImpl_paddusw_u128;
1054FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddd_u128;
1055FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddq_u128;
1056FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubb_u128, iemAImpl_psubsb_u128, iemAImpl_psubusb_u128;
1057FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubw_u128, iemAImpl_psubsw_u128, iemAImpl_psubusw_u128;
1058FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubd_u128;
1059FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubq_u128;
1060FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmullw_u128, iemAImpl_pmullw_u128_fallback;
1061FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhw_u128;
1062FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback;
1063FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaddwd_u128, iemAImpl_pmaddwd_u128_fallback;
1064FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminub_u128;
1065FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback;
1066FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback;
1067FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback;
1068FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback;
1069FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsw_u128, iemAImpl_pminsw_u128_fallback;
1070FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxub_u128;
1071FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback;
1072FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback;
1073FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback;
1074FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsw_u128;
1075FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback;
1076FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback;
1077FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback;
1078FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback;
1079FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback;
1080FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback;
1081FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback;
1082FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddw_u128, iemAImpl_phaddw_u128_fallback;
1083FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddd_u128, iemAImpl_phaddd_u128_fallback;
1084FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubw_u128, iemAImpl_phsubw_u128_fallback;
1085FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubd_u128, iemAImpl_phsubd_u128_fallback;
1086FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddsw_u128, iemAImpl_phaddsw_u128_fallback;
1087FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubsw_u128, iemAImpl_phsubsw_u128_fallback;
1088FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaddubsw_u128, iemAImpl_pmaddubsw_u128_fallback;
1089FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhrsw_u128, iemAImpl_pmulhrsw_u128_fallback;
1090FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmuludq_u128;
1091FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaddwd_u128, iemAImpl_pmaddwd_u128_fallback;
1092FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_packsswb_u128, iemAImpl_packuswb_u128;
1093FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_packssdw_u128, iemAImpl_packusdw_u128;
1094FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psllw_u128, iemAImpl_psrlw_u128, iemAImpl_psraw_u128;
1095FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pslld_u128, iemAImpl_psrld_u128, iemAImpl_psrad_u128;
1096FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psllq_u128, iemAImpl_psrlq_u128;
1097FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhuw_u128;
1098FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pavgb_u128, iemAImpl_pavgw_u128;
1099FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psadbw_u128;
1100FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmuldq_u128, iemAImpl_pmuldq_u128_fallback;
1101FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_unpcklps_u128, iemAImpl_unpcklpd_u128;
1102FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_unpckhps_u128, iemAImpl_unpckhpd_u128;
1103FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phminposuw_u128, iemAImpl_phminposuw_u128_fallback;
1104
1105FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpshufb_u128, iemAImpl_vpshufb_u128_fallback;
1106FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpand_u128, iemAImpl_vpand_u128_fallback;
1107FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpandn_u128, iemAImpl_vpandn_u128_fallback;
1108FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpor_u128, iemAImpl_vpor_u128_fallback;
1109FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpxor_u128, iemAImpl_vpxor_u128_fallback;
1110FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqb_u128, iemAImpl_vpcmpeqb_u128_fallback;
1111FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqw_u128, iemAImpl_vpcmpeqw_u128_fallback;
1112FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqd_u128, iemAImpl_vpcmpeqd_u128_fallback;
1113FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqq_u128, iemAImpl_vpcmpeqq_u128_fallback;
1114FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtb_u128, iemAImpl_vpcmpgtb_u128_fallback;
1115FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtw_u128, iemAImpl_vpcmpgtw_u128_fallback;
1116FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtd_u128, iemAImpl_vpcmpgtd_u128_fallback;
1117FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtq_u128, iemAImpl_vpcmpgtq_u128_fallback;
1118FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddb_u128, iemAImpl_vpaddb_u128_fallback;
1119FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddw_u128, iemAImpl_vpaddw_u128_fallback;
1120FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddd_u128, iemAImpl_vpaddd_u128_fallback;
1121FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddq_u128, iemAImpl_vpaddq_u128_fallback;
1122FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubb_u128, iemAImpl_vpsubb_u128_fallback;
1123FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubw_u128, iemAImpl_vpsubw_u128_fallback;
1124FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubd_u128, iemAImpl_vpsubd_u128_fallback;
1125FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubq_u128, iemAImpl_vpsubq_u128_fallback;
1126FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminub_u128, iemAImpl_vpminub_u128_fallback;
1127FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminuw_u128, iemAImpl_vpminuw_u128_fallback;
1128FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminud_u128, iemAImpl_vpminud_u128_fallback;
1129FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsb_u128, iemAImpl_vpminsb_u128_fallback;
1130FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsw_u128, iemAImpl_vpminsw_u128_fallback;
1131FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsd_u128, iemAImpl_vpminsd_u128_fallback;
1132FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxub_u128, iemAImpl_vpmaxub_u128_fallback;
1133FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxuw_u128, iemAImpl_vpmaxuw_u128_fallback;
1134FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxud_u128, iemAImpl_vpmaxud_u128_fallback;
1135FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsb_u128, iemAImpl_vpmaxsb_u128_fallback;
1136FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsw_u128, iemAImpl_vpmaxsw_u128_fallback;
1137FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsd_u128, iemAImpl_vpmaxsd_u128_fallback;
1138FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpacksswb_u128, iemAImpl_vpacksswb_u128_fallback;
1139FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackssdw_u128, iemAImpl_vpackssdw_u128_fallback;
1140FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackuswb_u128, iemAImpl_vpackuswb_u128_fallback;
1141FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackusdw_u128, iemAImpl_vpackusdw_u128_fallback;
1142FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmullw_u128, iemAImpl_vpmullw_u128_fallback;
1143FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulld_u128, iemAImpl_vpmulld_u128_fallback;
1144FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhw_u128, iemAImpl_vpmulhw_u128_fallback;
1145FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhuw_u128, iemAImpl_vpmulhuw_u128_fallback;
1146FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpavgb_u128, iemAImpl_vpavgb_u128_fallback;
1147FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpavgw_u128, iemAImpl_vpavgw_u128_fallback;
1148FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignb_u128, iemAImpl_vpsignb_u128_fallback;
1149FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignw_u128, iemAImpl_vpsignw_u128_fallback;
1150FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignd_u128, iemAImpl_vpsignd_u128_fallback;
1151FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddw_u128, iemAImpl_vphaddw_u128_fallback;
1152FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddd_u128, iemAImpl_vphaddd_u128_fallback;
1153FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubw_u128, iemAImpl_vphsubw_u128_fallback;
1154FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubd_u128, iemAImpl_vphsubd_u128_fallback;
1155FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddsw_u128, iemAImpl_vphaddsw_u128_fallback;
1156FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubsw_u128, iemAImpl_vphsubsw_u128_fallback;
1157FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaddubsw_u128, iemAImpl_vpmaddubsw_u128_fallback;
1158FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhrsw_u128, iemAImpl_vpmulhrsw_u128_fallback;
1159FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsadbw_u128, iemAImpl_vpsadbw_u128_fallback;
1160FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmuldq_u128, iemAImpl_vpmuldq_u128_fallback;
1161FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmuludq_u128, iemAImpl_vpmuludq_u128_fallback;
1162FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubsb_u128, iemAImpl_vpsubsb_u128_fallback;
1163FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubsw_u128, iemAImpl_vpsubsw_u128_fallback;
1164FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubusb_u128, iemAImpl_vpsubusb_u128_fallback;
1165FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubusw_u128, iemAImpl_vpsubusw_u128_fallback;
1166FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddusb_u128, iemAImpl_vpaddusb_u128_fallback;
1167FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddusw_u128, iemAImpl_vpaddusw_u128_fallback;
1168FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddsb_u128, iemAImpl_vpaddsb_u128_fallback;
1169FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddsw_u128, iemAImpl_vpaddsw_u128_fallback;
1170FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllw_u128, iemAImpl_vpsllw_u128_fallback;
1171FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpslld_u128, iemAImpl_vpslld_u128_fallback;
1172FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllq_u128, iemAImpl_vpsllq_u128_fallback;
1173FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsraw_u128, iemAImpl_vpsraw_u128_fallback;
1174FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrad_u128, iemAImpl_vpsrad_u128_fallback;
1175FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlw_u128, iemAImpl_vpsrlw_u128_fallback;
1176FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrld_u128, iemAImpl_vpsrld_u128_fallback;
1177FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlq_u128, iemAImpl_vpsrlq_u128_fallback;
1178FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaddwd_u128, iemAImpl_vpmaddwd_u128_fallback;
1179
1180FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsb_u128, iemAImpl_vpabsb_u128_fallback;
1181FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsw_u128, iemAImpl_vpabsw_u128_fallback;
1182FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsd_u128, iemAImpl_vpabsd_u128_fallback;
1183FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback;
1184
1185FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpshufb_u256, iemAImpl_vpshufb_u256_fallback;
1186FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpand_u256, iemAImpl_vpand_u256_fallback;
1187FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpandn_u256, iemAImpl_vpandn_u256_fallback;
1188FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpor_u256, iemAImpl_vpor_u256_fallback;
1189FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpxor_u256, iemAImpl_vpxor_u256_fallback;
1190FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqb_u256, iemAImpl_vpcmpeqb_u256_fallback;
1191FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqw_u256, iemAImpl_vpcmpeqw_u256_fallback;
1192FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqd_u256, iemAImpl_vpcmpeqd_u256_fallback;
1193FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqq_u256, iemAImpl_vpcmpeqq_u256_fallback;
1194FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtb_u256, iemAImpl_vpcmpgtb_u256_fallback;
1195FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtw_u256, iemAImpl_vpcmpgtw_u256_fallback;
1196FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtd_u256, iemAImpl_vpcmpgtd_u256_fallback;
1197FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtq_u256, iemAImpl_vpcmpgtq_u256_fallback;
1198FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddb_u256, iemAImpl_vpaddb_u256_fallback;
1199FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddw_u256, iemAImpl_vpaddw_u256_fallback;
1200FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddd_u256, iemAImpl_vpaddd_u256_fallback;
1201FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddq_u256, iemAImpl_vpaddq_u256_fallback;
1202FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubb_u256, iemAImpl_vpsubb_u256_fallback;
1203FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubw_u256, iemAImpl_vpsubw_u256_fallback;
1204FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubd_u256, iemAImpl_vpsubd_u256_fallback;
1205FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubq_u256, iemAImpl_vpsubq_u256_fallback;
1206FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminub_u256, iemAImpl_vpminub_u256_fallback;
1207FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminuw_u256, iemAImpl_vpminuw_u256_fallback;
1208FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminud_u256, iemAImpl_vpminud_u256_fallback;
1209FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsb_u256, iemAImpl_vpminsb_u256_fallback;
1210FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsw_u256, iemAImpl_vpminsw_u256_fallback;
1211FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsd_u256, iemAImpl_vpminsd_u256_fallback;
1212FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxub_u256, iemAImpl_vpmaxub_u256_fallback;
1213FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxuw_u256, iemAImpl_vpmaxuw_u256_fallback;
1214FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxud_u256, iemAImpl_vpmaxud_u256_fallback;
1215FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsb_u256, iemAImpl_vpmaxsb_u256_fallback;
1216FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsw_u256, iemAImpl_vpmaxsw_u256_fallback;
1217FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsd_u256, iemAImpl_vpmaxsd_u256_fallback;
1218FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpacksswb_u256, iemAImpl_vpacksswb_u256_fallback;
1219FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackssdw_u256, iemAImpl_vpackssdw_u256_fallback;
1220FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackuswb_u256, iemAImpl_vpackuswb_u256_fallback;
1221FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackusdw_u256, iemAImpl_vpackusdw_u256_fallback;
1222FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmullw_u256, iemAImpl_vpmullw_u256_fallback;
1223FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulld_u256, iemAImpl_vpmulld_u256_fallback;
1224FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhw_u256, iemAImpl_vpmulhw_u256_fallback;
1225FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhuw_u256, iemAImpl_vpmulhuw_u256_fallback;
1226FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpavgb_u256, iemAImpl_vpavgb_u256_fallback;
1227FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpavgw_u256, iemAImpl_vpavgw_u256_fallback;
1228FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignb_u256, iemAImpl_vpsignb_u256_fallback;
1229FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignw_u256, iemAImpl_vpsignw_u256_fallback;
1230FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignd_u256, iemAImpl_vpsignd_u256_fallback;
1231FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddw_u256, iemAImpl_vphaddw_u256_fallback;
1232FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddd_u256, iemAImpl_vphaddd_u256_fallback;
1233FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubw_u256, iemAImpl_vphsubw_u256_fallback;
1234FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubd_u256, iemAImpl_vphsubd_u256_fallback;
1235FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddsw_u256, iemAImpl_vphaddsw_u256_fallback;
1236FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubsw_u256, iemAImpl_vphsubsw_u256_fallback;
1237FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaddubsw_u256, iemAImpl_vpmaddubsw_u256_fallback;
1238FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhrsw_u256, iemAImpl_vpmulhrsw_u256_fallback;
1239FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsadbw_u256, iemAImpl_vpsadbw_u256_fallback;
1240FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmuldq_u256, iemAImpl_vpmuldq_u256_fallback;
1241FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmuludq_u256, iemAImpl_vpmuludq_u256_fallback;
1242FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubsb_u256, iemAImpl_vpsubsb_u256_fallback;
1243FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubsw_u256, iemAImpl_vpsubsw_u256_fallback;
1244FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubusb_u256, iemAImpl_vpsubusb_u256_fallback;
1245FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubusw_u256, iemAImpl_vpsubusw_u256_fallback;
1246FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddusb_u256, iemAImpl_vpaddusb_u256_fallback;
1247FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddusw_u256, iemAImpl_vpaddusw_u256_fallback;
1248FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddsb_u256, iemAImpl_vpaddsb_u256_fallback;
1249FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddsw_u256, iemAImpl_vpaddsw_u256_fallback;
1250FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllw_u256, iemAImpl_vpsllw_u256_fallback;
1251FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpslld_u256, iemAImpl_vpslld_u256_fallback;
1252FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllq_u256, iemAImpl_vpsllq_u256_fallback;
1253FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsraw_u256, iemAImpl_vpsraw_u256_fallback;
1254FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrad_u256, iemAImpl_vpsrad_u256_fallback;
1255FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlw_u256, iemAImpl_vpsrlw_u256_fallback;
1256FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrld_u256, iemAImpl_vpsrld_u256_fallback;
1257FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlq_u256, iemAImpl_vpsrlq_u256_fallback;
1258FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaddwd_u256, iemAImpl_vpmaddwd_u256_fallback;
1259FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermps_u256, iemAImpl_vpermps_u256_fallback;
1260FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermd_u256, iemAImpl_vpermd_u256_fallback;
1261
1262FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsb_u256, iemAImpl_vpabsb_u256_fallback;
1263FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsw_u256, iemAImpl_vpabsw_u256_fallback;
1264FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsd_u256, iemAImpl_vpabsd_u256_fallback;
1265/** @} */
1266
1267/** @name Media (SSE/MMX/AVX) operations: lowhalf1 + lowhalf2 -> full1.
1268 * @{ */
1269FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_punpcklbw_u64, iemAImpl_punpcklwd_u64, iemAImpl_punpckldq_u64;
1270FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_punpcklbw_u128, iemAImpl_punpcklwd_u128, iemAImpl_punpckldq_u128, iemAImpl_punpcklqdq_u128;
1271FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpunpcklbw_u128, iemAImpl_vpunpcklbw_u128_fallback,
1272 iemAImpl_vpunpcklwd_u128, iemAImpl_vpunpcklwd_u128_fallback,
1273 iemAImpl_vpunpckldq_u128, iemAImpl_vpunpckldq_u128_fallback,
1274 iemAImpl_vpunpcklqdq_u128, iemAImpl_vpunpcklqdq_u128_fallback,
1275 iemAImpl_vunpcklps_u128, iemAImpl_vunpcklps_u128_fallback,
1276 iemAImpl_vunpcklpd_u128, iemAImpl_vunpcklpd_u128_fallback,
1277 iemAImpl_vunpckhps_u128, iemAImpl_vunpckhps_u128_fallback,
1278 iemAImpl_vunpckhpd_u128, iemAImpl_vunpckhpd_u128_fallback;
1279
1280FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpunpcklbw_u256, iemAImpl_vpunpcklbw_u256_fallback,
1281 iemAImpl_vpunpcklwd_u256, iemAImpl_vpunpcklwd_u256_fallback,
1282 iemAImpl_vpunpckldq_u256, iemAImpl_vpunpckldq_u256_fallback,
1283 iemAImpl_vpunpcklqdq_u256, iemAImpl_vpunpcklqdq_u256_fallback,
1284 iemAImpl_vunpcklps_u256, iemAImpl_vunpcklps_u256_fallback,
1285 iemAImpl_vunpcklpd_u256, iemAImpl_vunpcklpd_u256_fallback,
1286 iemAImpl_vunpckhps_u256, iemAImpl_vunpckhps_u256_fallback,
1287 iemAImpl_vunpckhpd_u256, iemAImpl_vunpckhpd_u256_fallback;
1288/** @} */
1289
1290/** @name Media (SSE/MMX/AVX) operations: hihalf1 + hihalf2 -> full1.
1291 * @{ */
1292FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_punpckhbw_u64, iemAImpl_punpckhwd_u64, iemAImpl_punpckhdq_u64;
1293FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_punpckhbw_u128, iemAImpl_punpckhwd_u128, iemAImpl_punpckhdq_u128, iemAImpl_punpckhqdq_u128;
1294FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpunpckhbw_u128, iemAImpl_vpunpckhbw_u128_fallback,
1295 iemAImpl_vpunpckhwd_u128, iemAImpl_vpunpckhwd_u128_fallback,
1296 iemAImpl_vpunpckhdq_u128, iemAImpl_vpunpckhdq_u128_fallback,
1297 iemAImpl_vpunpckhqdq_u128, iemAImpl_vpunpckhqdq_u128_fallback;
1298FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpunpckhbw_u256, iemAImpl_vpunpckhbw_u256_fallback,
1299 iemAImpl_vpunpckhwd_u256, iemAImpl_vpunpckhwd_u256_fallback,
1300 iemAImpl_vpunpckhdq_u256, iemAImpl_vpunpckhdq_u256_fallback,
1301 iemAImpl_vpunpckhqdq_u256, iemAImpl_vpunpckhqdq_u256_fallback;
1302/** @} */
1303
1304/** @name Media (SSE/MMX/AVX) operation: Packed Shuffle Stuff (evil)
1305 * @{ */
1306typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUFU128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
1307typedef FNIEMAIMPLMEDIAPSHUFU128 *PFNIEMAIMPLMEDIAPSHUFU128;
1308typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUFU256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil));
1309typedef FNIEMAIMPLMEDIAPSHUFU256 *PFNIEMAIMPLMEDIAPSHUFU256;
1310IEM_DECL_IMPL_DEF(void, iemAImpl_pshufw_u64,(uint64_t *puDst, uint64_t const *puSrc, uint8_t bEvil));
1311FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_pshufhw_u128, iemAImpl_pshuflw_u128, iemAImpl_pshufd_u128;
1312#ifndef IEM_WITHOUT_ASSEMBLY
1313FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpshufhw_u256, iemAImpl_vpshuflw_u256, iemAImpl_vpshufd_u256;
1314#endif
1315FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpshufhw_u256_fallback, iemAImpl_vpshuflw_u256_fallback, iemAImpl_vpshufd_u256_fallback;
1316/** @} */
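/* Editor's illustration (not part of the original header): the PSHUFD-family helpers above take an
 * 8-bit immediate ("bEvil") that selects, two bits per destination lane, which source dword to copy.
 * Minimal standalone sketch of that selection only; iemExamplePShufD is a hypothetical name, not the
 * actual IEM worker or its assembly counterpart. */
DECLINLINE(void) iemExamplePShufD(uint32_t au32Dst[4], uint32_t const au32Src[4], uint8_t bEvil)
{
    for (unsigned iLane = 0; iLane < 4; iLane++)
        au32Dst[iLane] = au32Src[(bEvil >> (iLane * 2)) & 3]; /* bits 2i+1:2i pick the source dword for lane i */
}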
1317
1318/** @name Media (SSE/MMX/AVX) operation: Shift Immediate Stuff (evil)
1319 * @{ */
1320typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU64,(uint64_t *puDst, uint8_t bShift));
1321typedef FNIEMAIMPLMEDIAPSHIFTU64 *PFNIEMAIMPLMEDIAPSHIFTU64;
1322typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU128,(PRTUINT128U puDst, uint8_t bShift));
1323typedef FNIEMAIMPLMEDIAPSHIFTU128 *PFNIEMAIMPLMEDIAPSHIFTU128;
1324typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU256,(PRTUINT256U puDst, uint8_t bShift));
1325typedef FNIEMAIMPLMEDIAPSHIFTU256 *PFNIEMAIMPLMEDIAPSHIFTU256;
1326FNIEMAIMPLMEDIAPSHIFTU64 iemAImpl_psllw_imm_u64, iemAImpl_pslld_imm_u64, iemAImpl_psllq_imm_u64;
1327FNIEMAIMPLMEDIAPSHIFTU64 iemAImpl_psrlw_imm_u64, iemAImpl_psrld_imm_u64, iemAImpl_psrlq_imm_u64;
1328FNIEMAIMPLMEDIAPSHIFTU64 iemAImpl_psraw_imm_u64, iemAImpl_psrad_imm_u64;
1329FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psllw_imm_u128, iemAImpl_pslld_imm_u128, iemAImpl_psllq_imm_u128;
1330FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psrlw_imm_u128, iemAImpl_psrld_imm_u128, iemAImpl_psrlq_imm_u128;
1331FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psraw_imm_u128, iemAImpl_psrad_imm_u128;
1332FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_pslldq_imm_u128, iemAImpl_psrldq_imm_u128;
1333/** @} */
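/* Editor's illustration (not part of the original header): the *_imm_* shifters above shift each packed
 * lane by an immediate count; for the logical forms a count larger than the lane width clears the lane.
 * Hedged PSLLW-style sketch on a plain word array; iemExamplePsllwImm is a hypothetical name. */
DECLINLINE(void) iemExamplePsllwImm(uint16_t au16Lanes[8], uint8_t bShift)
{
    for (unsigned i = 0; i < 8; i++)
        au16Lanes[i] = bShift <= 15 ? (uint16_t)(au16Lanes[i] << bShift) : 0; /* counts > 15 zero the lane */
}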
1334
1335/** @name Media (SSE/MMX/AVX) operation: Move Byte Mask
1336 * @{ */
1337IEM_DECL_IMPL_DEF(void, iemAImpl_maskmovq_u64,(uint64_t *puMem, uint64_t const *puSrc, uint64_t const *puMsk));
1338IEM_DECL_IMPL_DEF(void, iemAImpl_maskmovdqu_u128,(PRTUINT128U puMem, PCRTUINT128U puSrc, PCRTUINT128U puMsk));
1339IEM_DECL_IMPL_DEF(void, iemAImpl_pmovmskb_u64,(uint64_t *pu64Dst, uint64_t const *puSrc));
1340IEM_DECL_IMPL_DEF(void, iemAImpl_pmovmskb_u128,(uint64_t *pu64Dst, PCRTUINT128U puSrc));
1341#ifndef IEM_WITHOUT_ASSEMBLY
1342IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovmskb_u256,(uint64_t *pu64Dst, PCRTUINT256U puSrc));
1343#endif
1344IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovmskb_u256_fallback,(uint64_t *pu64Dst, PCRTUINT256U puSrc));
1345/** @} */
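/* Editor's illustration (not part of the original header): PMOVMSKB gathers the most significant bit of
 * each source byte into the low bits of a general purpose register and zeroes the rest.  Sketch over a
 * plain byte array (8, 16 or 32 bytes for MMX/SSE/AVX); iemExamplePMovMskB is a hypothetical name. */
DECLINLINE(void) iemExamplePMovMskB(uint64_t *pu64Dst, uint8_t const *pabSrc, unsigned cbSrc)
{
    uint64_t fMask = 0;
    for (unsigned i = 0; i < cbSrc; i++)
        fMask |= (uint64_t)(pabSrc[i] >> 7) << i;   /* bit i := sign bit of source byte i */
    *pu64Dst = fMask;                               /* the remaining destination bits end up zero */
}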
1346
1347/** @name Media (SSE/MMX/AVX) operations: Variable Blend Packed Bytes/R32/R64.
1348 * @{ */
1349typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBLENDU128,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puMask));
1350typedef FNIEMAIMPLBLENDU128 *PFNIEMAIMPLBLENDU128;
1351typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLAVXBLENDU128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, PCRTUINT128U puMask));
1352typedef FNIEMAIMPLAVXBLENDU128 *PFNIEMAIMPLAVXBLENDU128;
1353typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLAVXBLENDU256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, PCRTUINT256U puMask));
1354typedef FNIEMAIMPLAVXBLENDU256 *PFNIEMAIMPLAVXBLENDU256;
1355
1356FNIEMAIMPLBLENDU128 iemAImpl_pblendvb_u128;
1357FNIEMAIMPLBLENDU128 iemAImpl_pblendvb_u128_fallback;
1358FNIEMAIMPLAVXBLENDU128 iemAImpl_vpblendvb_u128;
1359FNIEMAIMPLAVXBLENDU128 iemAImpl_vpblendvb_u128_fallback;
1360FNIEMAIMPLAVXBLENDU256 iemAImpl_vpblendvb_u256;
1361FNIEMAIMPLAVXBLENDU256 iemAImpl_vpblendvb_u256_fallback;
1362
1363FNIEMAIMPLBLENDU128 iemAImpl_blendvps_u128;
1364FNIEMAIMPLBLENDU128 iemAImpl_blendvps_u128_fallback;
1365FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvps_u128;
1366FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvps_u128_fallback;
1367FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvps_u256;
1368FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvps_u256_fallback;
1369
1370FNIEMAIMPLBLENDU128 iemAImpl_blendvpd_u128;
1371FNIEMAIMPLBLENDU128 iemAImpl_blendvpd_u128_fallback;
1372FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvpd_u128;
1373FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvpd_u128_fallback;
1374FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvpd_u256;
1375FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvpd_u256_fallback;
1376/** @} */
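/* Editor's illustration (not part of the original header): the variable blends above pick each element
 * from the source when the corresponding mask element has its sign bit set, otherwise they keep the
 * destination element (BLENDVPS/BLENDVPD do the same per 32/64-bit lane).  Byte-wise PBLENDVB sketch;
 * iemExamplePBlendVB is a hypothetical name. */
DECLINLINE(void) iemExamplePBlendVB(uint8_t abDst[16], uint8_t const abSrc[16], uint8_t const abMsk[16])
{
    for (unsigned i = 0; i < 16; i++)
        if (abMsk[i] & 0x80)    /* only the sign bit of each mask byte is consulted */
            abDst[i] = abSrc[i];
}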
1377
1378
1379/** @name Media (SSE/MMX/AVX) operation: Sort this later
1380 * @{ */
1381IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
1382IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
1383IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1384IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1385IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1386
1387IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
1388IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
1389IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
1390IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1391IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1392
1393IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
1394IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
1395IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u128_fallback,(PRTUINT128U puDst, uint16_t uSrc));
1396IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1397IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1398
1399IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
1400IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
1401IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1402IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1403IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1404
1405IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
1406IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
1407IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
1408IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1409IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1410
1411IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
1412IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
1413IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1414IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1415IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1416
1417IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
1418IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
1419IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1420IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1421IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1422
1423IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
1424IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
1425IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
1426IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1427IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1428
1429IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
1430IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
1431IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u128_fallback,(PRTUINT128U puDst, uint16_t uSrc));
1432IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1433IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1434
1435IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
1436IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
1437IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1438IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1439IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1440
1441IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
1442IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
1443IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
1444IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1445IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1446
1447IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
1448IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
1449IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1450IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1451IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1452
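/* Editor's illustration (not part of the original header): the PMOVZX/PMOVSX helpers above widen the
 * packed source elements into the destination; the zero-extending byte-to-word case looks roughly like
 * this.  iemExamplePMovZxBw is a hypothetical name, and PMOVSXBW would sign extend instead. */
DECLINLINE(void) iemExamplePMovZxBw(uint16_t au16Dst[8], uint64_t uSrc)
{
    for (unsigned i = 0; i < 8; i++)
        au16Dst[i] = (uint16_t)((uSrc >> (i * 8)) & 0xff);  /* zero extend source byte i into word i */
}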
1453IEM_DECL_IMPL_DEF(void, iemAImpl_shufpd_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
1454IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
1455IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
1456IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
1457IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
1458
1459IEM_DECL_IMPL_DEF(void, iemAImpl_shufps_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
1460IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
1461IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
1462IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
1463IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
1464
1465IEM_DECL_IMPL_DEF(void, iemAImpl_palignr_u64,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t bEvil));
1466IEM_DECL_IMPL_DEF(void, iemAImpl_palignr_u64_fallback,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t bEvil));
1467
1468IEM_DECL_IMPL_DEF(void, iemAImpl_movmskps_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1469IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1470IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u128_fallback,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1471IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u256,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
1472IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u256_fallback,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
1473
1474IEM_DECL_IMPL_DEF(void, iemAImpl_movmskpd_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1475IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1476IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u128_fallback,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1477IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u256,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
1478IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u256_fallback,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
1479
1480
1481typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U128IMM8,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
1482typedef FNIEMAIMPLMEDIAOPTF2U128IMM8 *PFNIEMAIMPLMEDIAOPTF2U128IMM8;
1483typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U256IMM8,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil));
1484typedef FNIEMAIMPLMEDIAOPTF2U256IMM8 *PFNIEMAIMPLMEDIAOPTF2U256IMM8;
1485typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U128IMM8,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
1486typedef FNIEMAIMPLMEDIAOPTF3U128IMM8 *PFNIEMAIMPLMEDIAOPTF3U128IMM8;
1487typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U256IMM8,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
1488typedef FNIEMAIMPLMEDIAOPTF3U256IMM8 *PFNIEMAIMPLMEDIAOPTF3U256IMM8;
1489
1490FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_palignr_u128, iemAImpl_palignr_u128_fallback;
1491FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_pblendw_u128, iemAImpl_pblendw_u128_fallback;
1492FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_blendps_u128, iemAImpl_blendps_u128_fallback;
1493FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_blendpd_u128, iemAImpl_blendpd_u128_fallback;
1494
1495FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpalignr_u128, iemAImpl_vpalignr_u128_fallback;
1496FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpblendw_u128, iemAImpl_vpblendw_u128_fallback;
1497FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpblendd_u128, iemAImpl_vpblendd_u128_fallback;
1498FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vblendps_u128, iemAImpl_vblendps_u128_fallback;
1499FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vblendpd_u128, iemAImpl_vblendpd_u128_fallback;
1500
1501FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpalignr_u256, iemAImpl_vpalignr_u256_fallback;
1502FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpblendw_u256, iemAImpl_vpblendw_u256_fallback;
1503FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpblendd_u256, iemAImpl_vpblendd_u256_fallback;
1504FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vblendps_u256, iemAImpl_vblendps_u256_fallback;
1505FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vblendpd_u256, iemAImpl_vblendpd_u256_fallback;
1506FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback;
1507FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback;
1508
1509FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesimc_u128, iemAImpl_aesimc_u128_fallback;
1510FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesenc_u128, iemAImpl_aesenc_u128_fallback;
1511FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesenclast_u128, iemAImpl_aesenclast_u128_fallback;
1512FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesdec_u128, iemAImpl_aesdec_u128_fallback;
1513FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesdeclast_u128, iemAImpl_aesdeclast_u128_fallback;
1514
1515FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vaesimc_u128, iemAImpl_vaesimc_u128_fallback;
1516FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesenc_u128, iemAImpl_vaesenc_u128_fallback;
1517FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesenclast_u128, iemAImpl_vaesenclast_u128_fallback;
1518FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesdec_u128, iemAImpl_vaesdec_u128_fallback;
1519FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesdeclast_u128, iemAImpl_vaesdeclast_u128_fallback;
1520
1521FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_aeskeygenassist_u128, iemAImpl_aeskeygenassist_u128_fallback;
1522
1523FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback;
1524
1525FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1nexte_u128, iemAImpl_sha1nexte_u128_fallback;
1526FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1msg1_u128, iemAImpl_sha1msg1_u128_fallback;
1527FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1msg2_u128, iemAImpl_sha1msg2_u128_fallback;
1528FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha256msg1_u128, iemAImpl_sha256msg1_u128_fallback;
1529FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha256msg2_u128, iemAImpl_sha256msg2_u128_fallback;
1530FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_sha1rnds4_u128, iemAImpl_sha1rnds4_u128_fallback;
1531IEM_DECL_IMPL_DEF(void, iemAImpl_sha256rnds2_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puXmm0Constants));
1532IEM_DECL_IMPL_DEF(void, iemAImpl_sha256rnds2_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puXmm0Constants));
1533
1534FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermq_u256, iemAImpl_vpermq_u256_fallback;
1535FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermpd_u256, iemAImpl_vpermpd_u256_fallback;
1536
1537typedef struct IEMPCMPISTRXSRC
1538{
1539 RTUINT128U uSrc1;
1540 RTUINT128U uSrc2;
1541} IEMPCMPISTRXSRC;
1542typedef IEMPCMPISTRXSRC *PIEMPCMPISTRXSRC;
1543typedef const IEMPCMPISTRXSRC *PCIEMPCMPISTRXSRC;
1544
1545typedef struct IEMPCMPESTRXSRC
1546{
1547 RTUINT128U uSrc1;
1548 RTUINT128U uSrc2;
1549 uint64_t u64Rax;
1550 uint64_t u64Rdx;
1551} IEMPCMPESTRXSRC;
1552typedef IEMPCMPESTRXSRC *PIEMPCMPESTRXSRC;
1553typedef const IEMPCMPESTRXSRC *PCIEMPCMPESTRXSRC;
1554
1555typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLPCMPISTRIU128IMM8,(uint32_t *pEFlags, PCRTUINT128U pSrc1, PCRTUINT128U pSrc2, uint8_t bEvil));
1556typedef FNIEMAIMPLPCMPISTRIU128IMM8 *PFNIEMAIMPLPCMPISTRIU128IMM8;
1557typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPESTRIU128IMM8,(uint32_t *pu32Ecx, uint32_t *pEFlags, PCIEMPCMPESTRXSRC pSrc, uint8_t bEvil));
1558typedef FNIEMAIMPLPCMPESTRIU128IMM8 *PFNIEMAIMPLPCMPESTRIU128IMM8;
1559
1560typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPISTRMU128IMM8,(PRTUINT128U puDst, uint32_t *pEFlags, PCIEMPCMPISTRXSRC pSrc, uint8_t bEvil));
1561typedef FNIEMAIMPLPCMPISTRMU128IMM8 *PFNIEMAIMPLPCMPISTRMU128IMM8;
1562typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPESTRMU128IMM8,(PRTUINT128U puDst, uint32_t *pEFlags, PCIEMPCMPESTRXSRC pSrc, uint8_t bEvil));
1563typedef FNIEMAIMPLPCMPESTRMU128IMM8 *PFNIEMAIMPLPCMPESTRMU128IMM8;
1564
1565FNIEMAIMPLPCMPISTRIU128IMM8 iemAImpl_pcmpistri_u128, iemAImpl_pcmpistri_u128_fallback;
1566FNIEMAIMPLPCMPESTRIU128IMM8 iemAImpl_pcmpestri_u128, iemAImpl_pcmpestri_u128_fallback;
1567FNIEMAIMPLPCMPISTRMU128IMM8 iemAImpl_pcmpistrm_u128, iemAImpl_pcmpistrm_u128_fallback;
1568FNIEMAIMPLPCMPESTRMU128IMM8 iemAImpl_pcmpestrm_u128, iemAImpl_pcmpestrm_u128_fallback;
1569FNIEMAIMPLPCMPISTRIU128IMM8 iemAImpl_vpcmpistri_u128, iemAImpl_vpcmpistri_u128_fallback;
1570FNIEMAIMPLPCMPESTRIU128IMM8 iemAImpl_vpcmpestri_u128, iemAImpl_vpcmpestri_u128_fallback;
1571FNIEMAIMPLPCMPISTRMU128IMM8 iemAImpl_vpcmpistrm_u128, iemAImpl_vpcmpistrm_u128_fallback;
1572FNIEMAIMPLPCMPESTRMU128IMM8 iemAImpl_vpcmpestrm_u128, iemAImpl_vpcmpestrm_u128_fallback;
1573
1574
1575FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_pclmulqdq_u128, iemAImpl_pclmulqdq_u128_fallback;
1576FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback;
1577
1578FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_mpsadbw_u128, iemAImpl_mpsadbw_u128_fallback;
1579FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vmpsadbw_u128, iemAImpl_vmpsadbw_u128_fallback;
1580FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vmpsadbw_u256, iemAImpl_vmpsadbw_u256_fallback;
1581
1582FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback;
1583FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback;
1584FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback;
1585FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback;
1586FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback;
1587FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback;
1588IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
1589IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
1590IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
1591IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
1592
1593FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback;
1594FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback;
1595FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback;
1596FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback;
1597
1598FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback;
1599FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback;
1600FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback;
1601FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback;
1602FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback;
1603FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback;
1604IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
1605IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
1606IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
1607IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
1608
1609FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpermilps_u128, iemAImpl_vpermilps_u128_fallback;
1610FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vpermilps_imm_u128, iemAImpl_vpermilps_imm_u128_fallback;
1611FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermilps_u256, iemAImpl_vpermilps_u256_fallback;
1612FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermilps_imm_u256, iemAImpl_vpermilps_imm_u256_fallback;
1613
1614FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpermilpd_u128, iemAImpl_vpermilpd_u128_fallback;
1615FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vpermilpd_imm_u128, iemAImpl_vpermilpd_imm_u128_fallback;
1616FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermilpd_u256, iemAImpl_vpermilpd_u256_fallback;
1617FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermilpd_imm_u256, iemAImpl_vpermilpd_imm_u256_fallback;
1618
1619FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllvd_u128, iemAImpl_vpsllvd_u128_fallback;
1620FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllvd_u256, iemAImpl_vpsllvd_u256_fallback;
1621FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllvq_u128, iemAImpl_vpsllvq_u128_fallback;
1622FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllvq_u256, iemAImpl_vpsllvq_u256_fallback;
1623FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsravd_u128, iemAImpl_vpsravd_u128_fallback;
1624FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsravd_u256, iemAImpl_vpsravd_u256_fallback;
1625FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlvd_u128, iemAImpl_vpsrlvd_u128_fallback;
1626FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlvd_u256, iemAImpl_vpsrlvd_u256_fallback;
1627FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlvq_u128, iemAImpl_vpsrlvq_u128_fallback;
1628FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlvq_u256, iemAImpl_vpsrlvq_u256_fallback;
1629/** @} */
1630
1631/** @name Media Odds and Ends
1632 * @{ */
1633typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U8,(uint32_t *puDst, uint8_t uSrc));
1634typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U16,(uint32_t *puDst, uint16_t uSrc));
1635typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U32,(uint32_t *puDst, uint32_t uSrc));
1636typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U64,(uint32_t *puDst, uint64_t uSrc));
1637FNIEMAIMPLCR32U8 iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback;
1638FNIEMAIMPLCR32U16 iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback;
1639FNIEMAIMPLCR32U32 iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback;
1640FNIEMAIMPLCR32U64 iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback;
1641
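/* Editor's illustration (not part of the original header): the CRC32 instruction implemented by the
 * helpers above uses the CRC-32C (Castagnoli) polynomial, reflected form 0x82f63b78, with no pre- or
 * post-inversion.  Bit-at-a-time byte update as a reference sketch; the real workers may use tables or
 * the SSE4.2 instruction itself.  iemExampleCrc32CByte is a hypothetical name. */
DECLINLINE(void) iemExampleCrc32CByte(uint32_t *puDst, uint8_t uSrc)
{
    uint32_t uCrc = *puDst ^ uSrc;
    for (unsigned i = 0; i < 8; i++)
        uCrc = (uCrc >> 1) ^ (UINT32_C(0x82f63b78) & (0U - (uCrc & 1)));
    *puDst = uCrc;
}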
1642typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLF2EFL128,(PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint32_t *pEFlags));
1643typedef FNIEMAIMPLF2EFL128 *PFNIEMAIMPLF2EFL128;
1644typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLF2EFL256,(PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint32_t *pEFlags));
1645typedef FNIEMAIMPLF2EFL256 *PFNIEMAIMPLF2EFL256;
1646FNIEMAIMPLF2EFL128 iemAImpl_ptest_u128;
1647FNIEMAIMPLF2EFL256 iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback;
1648FNIEMAIMPLF2EFL128 iemAImpl_vtestps_u128, iemAImpl_vtestps_u128_fallback;
1649FNIEMAIMPLF2EFL256 iemAImpl_vtestps_u256, iemAImpl_vtestps_u256_fallback;
1650FNIEMAIMPLF2EFL128 iemAImpl_vtestpd_u128, iemAImpl_vtestpd_u128_fallback;
1651FNIEMAIMPLF2EFL256 iemAImpl_vtestpd_u256, iemAImpl_vtestpd_u256_fallback;
1652
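/* Editor's illustration (not part of the original header): PTEST sets ZF when the AND of the two
 * operands is all zero and CF when the second operand has no bits set outside the first; the other
 * arithmetic flags are cleared.  Sketch over 128 bits as two qwords; which of puSrc1/puSrc2 maps to
 * which architectural operand is an assumption here, and iemExamplePTestFlags is a hypothetical name. */
DECLINLINE(uint32_t) iemExamplePTestFlags(uint64_t const au64Op1[2], uint64_t const au64Op2[2])
{
    uint32_t fEfl = 0;
    if (!(au64Op1[0] & au64Op2[0]) && !(au64Op1[1] & au64Op2[1]))
        fEfl |= X86_EFL_ZF;                     /* no bit set in both operands */
    if (!(~au64Op1[0] & au64Op2[0]) && !(~au64Op1[1] & au64Op2[1]))
        fEfl |= X86_EFL_CF;                     /* operand 2's set bits are a subset of operand 1's */
    return fEfl;                                /* AF/OF/PF/SF are cleared by the instruction */
}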
1653typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32U64,(uint32_t uMxCsrIn, int32_t *pi32Dst, const uint64_t *pu64Src)); /* pu64Src is a double precision floating point. */
1654typedef FNIEMAIMPLSSEF2I32U64 *PFNIEMAIMPLSSEF2I32U64;
1655typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64U64,(uint32_t uMxCsrIn, int64_t *pi64Dst, const uint64_t *pu64Src)); /* pu64Src is a double precision floating point. */
1656typedef FNIEMAIMPLSSEF2I64U64 *PFNIEMAIMPLSSEF2I64U64;
1657typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32U32,(uint32_t uMxCsrIn, int32_t *pi32Dst, const uint32_t *pu32Src)); /* pu32Src is a single precision floating point. */
1658typedef FNIEMAIMPLSSEF2I32U32 *PFNIEMAIMPLSSEF2I32U32;
1659typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64U32,(uint32_t uMxCsrIn, int64_t *pi64Dst, const uint32_t *pu32Src)); /* pu32Src is a single precision floating point. */
1660typedef FNIEMAIMPLSSEF2I64U32 *PFNIEMAIMPLSSEF2I64U32;
1661typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32R32,(uint32_t uMxCsrIn, int32_t *pi32Dst, PCRTFLOAT32U pr32Src));
1662typedef FNIEMAIMPLSSEF2I32R32 *PFNIEMAIMPLSSEF2I32R32;
1663typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64R32,(uint32_t uMxCsrIn, int64_t *pi64Dst, PCRTFLOAT32U pr32Src));
1664typedef FNIEMAIMPLSSEF2I64R32 *PFNIEMAIMPLSSEF2I64R32;
1665typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32R64,(uint32_t uMxCsrIn, int32_t *pi32Dst, PCRTFLOAT64U pr64Src));
1666typedef FNIEMAIMPLSSEF2I32R64 *PFNIEMAIMPLSSEF2I32R64;
1667typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64R64,(uint32_t uMxCsrIn, int64_t *pi64Dst, PCRTFLOAT64U pr64Src));
1668typedef FNIEMAIMPLSSEF2I64R64 *PFNIEMAIMPLSSEF2I64R64;
1669
1670FNIEMAIMPLSSEF2I32U64 iemAImpl_cvttsd2si_i32_r64;
1671FNIEMAIMPLSSEF2I32U64 iemAImpl_cvtsd2si_i32_r64;
1672
1673FNIEMAIMPLSSEF2I64U64 iemAImpl_cvttsd2si_i64_r64;
1674FNIEMAIMPLSSEF2I64U64 iemAImpl_cvtsd2si_i64_r64;
1675
1676FNIEMAIMPLSSEF2I32U32 iemAImpl_cvttss2si_i32_r32;
1677FNIEMAIMPLSSEF2I32U32 iemAImpl_cvtss2si_i32_r32;
1678
1679FNIEMAIMPLSSEF2I64U32 iemAImpl_cvttss2si_i64_r32;
1680FNIEMAIMPLSSEF2I64U32 iemAImpl_cvtss2si_i64_r32;
1681
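/* Editor's usage sketch (not part of the original header): the SSE scalar conversion workers take the
 * current MXCSR as input and return the updated MXCSR so the caller can merge newly raised exception
 * flags.  iemExampleCvtTsd2Si is a hypothetical helper; 0x1f80 is the MXCSR power-on default and
 * 0x400e000000000000 is the raw bit pattern of the double 3.75. */
DECLINLINE(int32_t) iemExampleCvtTsd2Si(void)
{
    uint64_t const u64SrcBits = UINT64_C(0x400e000000000000);  /* 3.75 as raw IEEE-754 double bits */
    int32_t        i32Result  = 0;
    uint32_t const fMxCsrOut  = iemAImpl_cvttsd2si_i32_r64(UINT32_C(0x1f80), &i32Result, &u64SrcBits);
    (void)fMxCsrOut;            /* would be merged back into the guest MXCSR by the caller */
    return i32Result;           /* 3 - cvttsd2si truncates towards zero */
}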
1682FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvttss2si_i32_r32, iemAImpl_vcvttss2si_i32_r32_fallback;
1683FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvttss2si_i64_r32, iemAImpl_vcvttss2si_i64_r32_fallback;
1684FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvtss2si_i32_r32, iemAImpl_vcvtss2si_i32_r32_fallback;
1685FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvtss2si_i64_r32, iemAImpl_vcvtss2si_i64_r32_fallback;
1686
1687FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvttss2si_i32_r64, iemAImpl_vcvttss2si_i32_r64_fallback;
1688FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvttss2si_i64_r64, iemAImpl_vcvttss2si_i64_r64_fallback;
1689FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvtss2si_i32_r64, iemAImpl_vcvtss2si_i32_r64_fallback;
1690FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvtss2si_i64_r64, iemAImpl_vcvtss2si_i64_r64_fallback;
1691
1692FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvttsd2si_i32_r32, iemAImpl_vcvttsd2si_i32_r32_fallback;
1693FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvttsd2si_i64_r32, iemAImpl_vcvttsd2si_i64_r32_fallback;
1694FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvtsd2si_i32_r32, iemAImpl_vcvtsd2si_i32_r32_fallback;
1695FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvtsd2si_i64_r32, iemAImpl_vcvtsd2si_i64_r32_fallback;
1696
1697FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvttsd2si_i32_r64, iemAImpl_vcvttsd2si_i32_r64_fallback;
1698FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvttsd2si_i64_r64, iemAImpl_vcvttsd2si_i64_r64_fallback;
1699FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvtsd2si_i32_r64, iemAImpl_vcvtsd2si_i32_r64_fallback;
1700FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvtsd2si_i64_r64, iemAImpl_vcvtsd2si_i64_r64_fallback;
1701
1702typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R32I32,(uint32_t uMxCsrIn, PRTFLOAT32U pr32Dst, const int32_t *pi32Src));
1703typedef FNIEMAIMPLSSEF2R32I32 *PFNIEMAIMPLSSEF2R32I32;
1704typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R32I64,(uint32_t uMxCsrIn, PRTFLOAT32U pr32Dst, const int64_t *pi64Src));
1705typedef FNIEMAIMPLSSEF2R32I64 *PFNIEMAIMPLSSEF2R32I64;
1706
1707FNIEMAIMPLSSEF2R32I32 iemAImpl_cvtsi2ss_r32_i32;
1708FNIEMAIMPLSSEF2R32I64 iemAImpl_cvtsi2ss_r32_i64;
1709
1710typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLAVXF3XMMI32,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, const int32_t *pi32Src));
1711typedef FNIEMAIMPLAVXF3XMMI32 *PFNIEMAIMPLAVXF3XMMI32;
1712typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLAVXF3XMMI64,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, const int64_t *pi64Src));
1713typedef FNIEMAIMPLAVXF3XMMI64 *PFNIEMAIMPLAVXF3XMMI64;
1714
1715FNIEMAIMPLAVXF3XMMI32 iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback;
1716FNIEMAIMPLAVXF3XMMI64 iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback;
1717
1718
1719typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R64I32,(uint32_t uMxCsrIn, PRTFLOAT64U pr64Dst, const int32_t *pi32Src));
1720typedef FNIEMAIMPLSSEF2R64I32 *PFNIEMAIMPLSSEF2R64I32;
1721typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R64I64,(uint32_t uMxCsrIn, PRTFLOAT64U pr64Dst, const int64_t *pi64Src));
1722typedef FNIEMAIMPLSSEF2R64I64 *PFNIEMAIMPLSSEF2R64I64;
1723
1724FNIEMAIMPLSSEF2R64I32 iemAImpl_cvtsi2sd_r64_i32;
1725FNIEMAIMPLSSEF2R64I64 iemAImpl_cvtsi2sd_r64_i64;
1726
1727FNIEMAIMPLAVXF3XMMI32 iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback;
1728FNIEMAIMPLAVXF3XMMI64 iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback;
1729
1730IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u128_u64,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two single precision floating point values. */
1731IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u128_u64_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two single precision floating point values. */
1732IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u256_u128,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
1733IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u256_u128_fallback,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
1734
1735
1736IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u128_u64,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two packed 32-bit signed integers. */
1737IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u128_u64_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two packed 32-bit signed integers. */
1738IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u256_u128,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
1739IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u256_u128_fallback,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
1740
1741
1742typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLF2EFLMXCSRR32R32,(uint32_t uMxCsrIn, uint32_t *pfEFlags, RTFLOAT32U uSrc1, RTFLOAT32U uSrc2));
1743typedef FNIEMAIMPLF2EFLMXCSRR32R32 *PFNIEMAIMPLF2EFLMXCSRR32R32;
1744
1745typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLF2EFLMXCSRR64R64,(uint32_t uMxCsrIn, uint32_t *pfEFlags, RTFLOAT64U uSrc1, RTFLOAT64U uSrc2));
1746typedef FNIEMAIMPLF2EFLMXCSRR64R64 *PFNIEMAIMPLF2EFLMXCSRR64R64;
1747
1748FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_ucomiss_u128;
1749FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback;
1750
1751FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_ucomisd_u128;
1752FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback;
1753
1754FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_comiss_u128;
1755FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback;
1756
1757FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_comisd_u128;
1758FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback;
1759
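/* Editor's illustration (not part of the original header): UCOMISS/COMISS map the comparison result
 * onto ZF/PF/CF (unordered: all three set; less: CF; equal: ZF; greater: none) and clear OF/AF/SF.
 * Flag-mapping sketch only, ignoring the MXCSR/#I handling that differs between the U and non-U forms;
 * iemExampleUComissFlags is a hypothetical name and the real workers operate on RTFLOAT32U. */
DECLINLINE(uint32_t) iemExampleUComissFlags(float r32Src1, float r32Src2)
{
    if (r32Src1 != r32Src1 || r32Src2 != r32Src2)   /* unordered: at least one operand is NaN */
        return X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
    if (r32Src1 < r32Src2)
        return X86_EFL_CF;
    if (r32Src1 == r32Src2)
        return X86_EFL_ZF;
    return 0;                                       /* src1 > src2 */
}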
1760
1761typedef struct IEMMEDIAF2XMMSRC
1762{
1763 X86XMMREG uSrc1;
1764 X86XMMREG uSrc2;
1765} IEMMEDIAF2XMMSRC;
1766typedef IEMMEDIAF2XMMSRC *PIEMMEDIAF2XMMSRC;
1767typedef const IEMMEDIAF2XMMSRC *PCIEMMEDIAF2XMMSRC;
1768
1769
1770typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3XMMIMM8,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCIEMMEDIAF2XMMSRC puSrc, uint8_t bEvil));
1771typedef FNIEMAIMPLMEDIAF3XMMIMM8 *PFNIEMAIMPLMEDIAF3XMMIMM8;
1772
1773
1774typedef struct IEMMEDIAF2YMMSRC
1775{
1776 X86YMMREG uSrc1;
1777 X86YMMREG uSrc2;
1778} IEMMEDIAF2YMMSRC;
1779typedef IEMMEDIAF2YMMSRC *PIEMMEDIAF2YMMSRC;
1780typedef const IEMMEDIAF2YMMSRC *PCIEMMEDIAF2YMMSRC;
1781
1782
1783typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3YMMIMM8,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCIEMMEDIAF2YMMSRC puSrc, uint8_t bEvil));
1784typedef FNIEMAIMPLMEDIAF3YMMIMM8 *PFNIEMAIMPLMEDIAF3YMMIMM8;
1785
1786
1787FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpps_u128;
1788FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmppd_u128;
1789FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpss_u128;
1790FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpsd_u128;
1791
1792FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpps_u128, iemAImpl_vcmpps_u128_fallback;
1793FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmppd_u128, iemAImpl_vcmppd_u128_fallback;
1794FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback;
1795FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback;
1796
1797FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vcmpps_u256, iemAImpl_vcmpps_u256_fallback;
1798FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vcmppd_u256, iemAImpl_vcmppd_u256_fallback;
1799
1800FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_roundss_u128;
1801FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_roundsd_u128;
1802
1803FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_dpps_u128, iemAImpl_dpps_u128_fallback;
1804FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_dppd_u128, iemAImpl_dppd_u128_fallback;
1805
1806
1807typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U128IMM8,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, uint8_t bEvil));
1808typedef FNIEMAIMPLMEDIAF2U128IMM8 *PFNIEMAIMPLMEDIAF2U128IMM8;
1809
1810
1811typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U256IMM8,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc, uint8_t bEvil));
1812typedef FNIEMAIMPLMEDIAF2U256IMM8 *PFNIEMAIMPLMEDIAF2U256IMM8;
1813
1814
1815FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_roundps_u128, iemAImpl_roundps_u128_fallback;
1816FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_roundpd_u128, iemAImpl_roundpd_u128_fallback;
1817
1818FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_vroundps_u128, iemAImpl_vroundps_u128_fallback;
1819FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_vroundpd_u128, iemAImpl_vroundpd_u128_fallback;
1820
1821FNIEMAIMPLMEDIAF2U256IMM8 iemAImpl_vroundps_u256, iemAImpl_vroundps_u256_fallback;
1822FNIEMAIMPLMEDIAF2U256IMM8 iemAImpl_vroundpd_u256, iemAImpl_vroundpd_u256_fallback;
1823
1824FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vroundss_u128, iemAImpl_vroundss_u128_fallback;
1825FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vroundsd_u128, iemAImpl_vroundsd_u128_fallback;
1826
1827FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vdpps_u128, iemAImpl_vdpps_u128_fallback;
1828FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vdppd_u128, iemAImpl_vdppd_u128_fallback;
1829
1830FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vdpps_u256, iemAImpl_vdpps_u256_fallback;
1831
1832
1833typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU64U128,(uint32_t fMxCsrIn, uint64_t *pu64Dst, PCX86XMMREG pSrc));
1834typedef FNIEMAIMPLMXCSRU64U128 *PFNIEMAIMPLMXCSRU64U128;
1835
1836FNIEMAIMPLMXCSRU64U128 iemAImpl_cvtpd2pi_u128;
1837FNIEMAIMPLMXCSRU64U128 iemAImpl_cvttpd2pi_u128;
1838
1839typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU128U64,(uint32_t fMxCsrIn, PX86XMMREG pDst, uint64_t u64Src));
1840typedef FNIEMAIMPLMXCSRU128U64 *PFNIEMAIMPLMXCSRU128U64;
1841
1842FNIEMAIMPLMXCSRU128U64 iemAImpl_cvtpi2ps_u128;
1843FNIEMAIMPLMXCSRU128U64 iemAImpl_cvtpi2pd_u128;
1844
1845typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU64U64,(uint32_t fMxCsrIn, uint64_t *pu64Dst, uint64_t u64Src));
1846typedef FNIEMAIMPLMXCSRU64U64 *PFNIEMAIMPLMXCSRU64U64;
1847
1848FNIEMAIMPLMXCSRU64U64 iemAImpl_cvtps2pi_u128;
1849FNIEMAIMPLMXCSRU64U64 iemAImpl_cvttps2pi_u128;
1850
1851/** @} */
1852
1853
1854/** @name Function tables.
1855 * @{
1856 */
1857
1858/**
1859 * Function table for a binary operator providing implementation based on
1860 * operand size.
1861 */
1862typedef struct IEMOPBINSIZES
1863{
1864 PFNIEMAIMPLBINU8 pfnNormalU8, pfnLockedU8;
1865 PFNIEMAIMPLBINU16 pfnNormalU16, pfnLockedU16;
1866 PFNIEMAIMPLBINU32 pfnNormalU32, pfnLockedU32;
1867 PFNIEMAIMPLBINU64 pfnNormalU64, pfnLockedU64;
1868} IEMOPBINSIZES;
1869/** Pointer to a binary operator function table. */
1870typedef IEMOPBINSIZES const *PCIEMOPBINSIZES;
1871
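/* Editor's illustration (not part of the original header): such a table is typically consumed by
 * picking the worker that matches the effective operand size and the presence of a LOCK prefix.
 * Hedged sketch for the 32-bit case; iemExampleSelectBinU32 is a hypothetical helper, not how the
 * interpreter actually dispatches. */
DECLINLINE(PFNIEMAIMPLBINU32) iemExampleSelectBinU32(PCIEMOPBINSIZES pImpl, bool fLockPrefix)
{
    return fLockPrefix ? pImpl->pfnLockedU32 : pImpl->pfnNormalU32;
}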
1872
1873/**
1874 * Function table for a unary operator providing implementation based on
1875 * operand size.
1876 */
1877typedef struct IEMOPUNARYSIZES
1878{
1879 PFNIEMAIMPLUNARYU8 pfnNormalU8, pfnLockedU8;
1880 PFNIEMAIMPLUNARYU16 pfnNormalU16, pfnLockedU16;
1881 PFNIEMAIMPLUNARYU32 pfnNormalU32, pfnLockedU32;
1882 PFNIEMAIMPLUNARYU64 pfnNormalU64, pfnLockedU64;
1883} IEMOPUNARYSIZES;
1884/** Pointer to a unary operator function table. */
1885typedef IEMOPUNARYSIZES const *PCIEMOPUNARYSIZES;
1886
1887
1888/**
1889 * Function table for a shift operator providing implementation based on
1890 * operand size.
1891 */
1892typedef struct IEMOPSHIFTSIZES
1893{
1894 PFNIEMAIMPLSHIFTU8 pfnNormalU8;
1895 PFNIEMAIMPLSHIFTU16 pfnNormalU16;
1896 PFNIEMAIMPLSHIFTU32 pfnNormalU32;
1897 PFNIEMAIMPLSHIFTU64 pfnNormalU64;
1898} IEMOPSHIFTSIZES;
1899/** Pointer to a shift operator function table. */
1900typedef IEMOPSHIFTSIZES const *PCIEMOPSHIFTSIZES;
1901
1902
1903/**
1904 * Function table for a multiplication or division operation.
1905 */
1906typedef struct IEMOPMULDIVSIZES
1907{
1908 PFNIEMAIMPLMULDIVU8 pfnU8;
1909 PFNIEMAIMPLMULDIVU16 pfnU16;
1910 PFNIEMAIMPLMULDIVU32 pfnU32;
1911 PFNIEMAIMPLMULDIVU64 pfnU64;
1912} IEMOPMULDIVSIZES;
1913/** Pointer to a multiplication or division operation function table. */
1914typedef IEMOPMULDIVSIZES const *PCIEMOPMULDIVSIZES;
1915
1916
1917/**
1918 * Function table for a double precision shift operator providing implementation
1919 * based on operand size.
1920 */
1921typedef struct IEMOPSHIFTDBLSIZES
1922{
1923 PFNIEMAIMPLSHIFTDBLU16 pfnNormalU16;
1924 PFNIEMAIMPLSHIFTDBLU32 pfnNormalU32;
1925 PFNIEMAIMPLSHIFTDBLU64 pfnNormalU64;
1926} IEMOPSHIFTDBLSIZES;
1927/** Pointer to a double precision shift function table. */
1928typedef IEMOPSHIFTDBLSIZES const *PCIEMOPSHIFTDBLSIZES;
1929
1930
1931/**
1932 * Function table for a media instruction taking two full sized media source
1933 * registers and one full sized destination register (AVX).
1934 */
1935typedef struct IEMOPMEDIAF3
1936{
1937 PFNIEMAIMPLMEDIAF3U128 pfnU128;
1938 PFNIEMAIMPLMEDIAF3U256 pfnU256;
1939} IEMOPMEDIAF3;
1940/** Pointer to a media operation function table for 3 full sized ops (AVX). */
1941typedef IEMOPMEDIAF3 const *PCIEMOPMEDIAF3;
1942
1943/** @def IEMOPMEDIAF3_INIT_VARS_EX
1944 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
1945 * given functions as initializers. For use in AVX functions where a pair of
1946 * functions are only used once and the function table need not be public. */
1947#ifndef TST_IEM_CHECK_MC
1948# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
1949# define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
1950 static IEMOPMEDIAF3 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
1951 static IEMOPMEDIAF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
1952# else
1953# define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
1954 static IEMOPMEDIAF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
1955# endif
1956#else
1957# define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
1958#endif
1959/** @def IEMOPMEDIAF3_INIT_VARS
1960 * Generate AVX function tables for the @a a_InstrNm instruction.
1961 * @sa IEMOPMEDIAF3_INIT_VARS_EX */
1962#define IEMOPMEDIAF3_INIT_VARS(a_InstrNm) \
1963 IEMOPMEDIAF3_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
1964 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
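/*
 * Usage sketch (illustrative only, not lifted from the decoder sources): an
 * AVX instruction body typically instantiates the table pair and then picks
 * the host-assembly variant when available, falling back to the portable C
 * implementation otherwise.  The selection helper name below is the usual
 * IEM idiom but should be treated as an assumption here:
 *
 *      IEMOPMEDIAF3_INIT_VARS(vaddps);
 *      PCIEMOPMEDIAF3 const pImpl = IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback);
 *      // ...the body then invokes pImpl->pfnU128 or pImpl->pfnU256 as appropriate.
 */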
1965
1966
1967/**
1968 * Function table for a media instruction taking one full sized media source
1969 * register and one full sized destination register (AVX).
1970 */
1971typedef struct IEMOPMEDIAF2
1972{
1973 PFNIEMAIMPLMEDIAF2U128 pfnU128;
1974 PFNIEMAIMPLMEDIAF2U256 pfnU256;
1975} IEMOPMEDIAF2;
1976/** Pointer to a media operation function table for 2 full sized ops (AVX). */
1977typedef IEMOPMEDIAF2 const *PCIEMOPMEDIAF2;
1978
1979/** @def IEMOPMEDIAF2_INIT_VARS_EX
1980 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
1981 * given functions as initializers. For use in AVX functions where a pair of
1982 * functions are only used once and the function table need not be public. */
1983#ifndef TST_IEM_CHECK_MC
1984# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
1985# define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
1986 static IEMOPMEDIAF2 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
1987 static IEMOPMEDIAF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
1988# else
1989# define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
1990 static IEMOPMEDIAF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
1991# endif
1992#else
1993# define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
1994#endif
1995/** @def IEMOPMEDIAF2_INIT_VARS
1996 * Generate AVX function tables for the @a a_InstrNm instruction.
1997 * @sa IEMOPMEDIAF2_INIT_VARS_EX */
1998#define IEMOPMEDIAF2_INIT_VARS(a_InstrNm) \
1999 IEMOPMEDIAF2_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2000 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2001
2002
2003/**
2004 * Function table for a media instruction taking two full sized media source
2005 * registers and one full sized destination register, but no additional state
2006 * (AVX).
2007 */
2008typedef struct IEMOPMEDIAOPTF3
2009{
2010 PFNIEMAIMPLMEDIAOPTF3U128 pfnU128;
2011 PFNIEMAIMPLMEDIAOPTF3U256 pfnU256;
2012} IEMOPMEDIAOPTF3;
2013/** Pointer to a media operation function table for 3 full sized ops (AVX). */
2014typedef IEMOPMEDIAOPTF3 const *PCIEMOPMEDIAOPTF3;
2015
2016/** @def IEMOPMEDIAOPTF3_INIT_VARS_EX
2017 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2018 * given functions as initializers. For use in AVX functions where a pair of
2019 * functions are only used once and the function table need not be public. */
2020#ifndef TST_IEM_CHECK_MC
2021# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2022# define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2023 static IEMOPMEDIAOPTF3 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2024 static IEMOPMEDIAOPTF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2025# else
2026# define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2027 static IEMOPMEDIAOPTF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2028# endif
2029#else
2030# define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2031#endif
2032/** @def IEMOPMEDIAOPTF3_INIT_VARS
2033 * Generate AVX function tables for the @a a_InstrNm instruction.
2034 * @sa IEMOPMEDIAOPTF3_INIT_VARS_EX */
2035#define IEMOPMEDIAOPTF3_INIT_VARS(a_InstrNm) \
2036 IEMOPMEDIAOPTF3_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2037 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2038
2039/**
2040 * Function table for a media instruction taking one full sized media source
2041 * register and one full sized destination register, but no additional state
2042 * (AVX).
2043 */
2044typedef struct IEMOPMEDIAOPTF2
2045{
2046 PFNIEMAIMPLMEDIAOPTF2U128 pfnU128;
2047 PFNIEMAIMPLMEDIAOPTF2U256 pfnU256;
2048} IEMOPMEDIAOPTF2;
2049/** Pointer to a media operation function table for 2 full sized ops (AVX). */
2050typedef IEMOPMEDIAOPTF2 const *PCIEMOPMEDIAOPTF2;
2051
2052/** @def IEMOPMEDIAOPTF2_INIT_VARS_EX
2053 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2054 * given functions as initializers. For use in AVX functions where a pair of
2055 * functions are only used once and the function table need not be public. */
2056#ifndef TST_IEM_CHECK_MC
2057# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2058# define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2059 static IEMOPMEDIAOPTF2 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2060 static IEMOPMEDIAOPTF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2061# else
2062# define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2063 static IEMOPMEDIAOPTF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2064# endif
2065#else
2066# define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2067#endif
2068/** @def IEMOPMEDIAOPTF2_INIT_VARS
2069 * Generate AVX function tables for the @a a_InstrNm instruction.
2070 * @sa IEMOPMEDIAOPTF2_INIT_VARS_EX */
2071#define IEMOPMEDIAOPTF2_INIT_VARS(a_InstrNm) \
2072 IEMOPMEDIAOPTF2_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2073 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2074
2075
2076/**
2077 * Function table for a media instruction taking one full sized media source
2078 * register and one full sized destination register and an 8-bit immediate (AVX).
2079 */
2080typedef struct IEMOPMEDIAF2IMM8
2081{
2082 PFNIEMAIMPLMEDIAF2U128IMM8 pfnU128;
2083 PFNIEMAIMPLMEDIAF2U256IMM8 pfnU256;
2084} IEMOPMEDIAF2IMM8;
2085/** Pointer to a media operation function table for 2 full sized ops (AVX). */
2086typedef IEMOPMEDIAF2IMM8 const *PCIEMOPMEDIAF2IMM8;
2087
2088/** @def IEMOPMEDIAF2IMM8_INIT_VARS_EX
2089 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2090 * given functions as initializers. For use in AVX functions where a pair of
2091 * functions are only used once and the function table need not be public. */
2092#ifndef TST_IEM_CHECK_MC
2093# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2094# define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2095 static IEMOPMEDIAF2IMM8 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2096 static IEMOPMEDIAF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2097# else
2098# define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2099 static IEMOPMEDIAF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2100# endif
2101#else
2102# define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2103#endif
2104/** @def IEMOPMEDIAF2IMM8_INIT_VARS
2105 * Generate AVX function tables for the @a a_InstrNm instruction.
2106 * @sa IEMOPMEDIAF2IMM8_INIT_VARS_EX */
2107#define IEMOPMEDIAF2IMM8_INIT_VARS(a_InstrNm) \
2108 IEMOPMEDIAF2IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2109 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2110
2111
2112/**
2113 * Function table for a media instruction taking one full sized media source
2114 * register and one full sized destination register and an 8-bit immediate, but no additional state
2115 * (AVX).
2116 */
2117typedef struct IEMOPMEDIAOPTF2IMM8
2118{
2119 PFNIEMAIMPLMEDIAOPTF2U128IMM8 pfnU128;
2120 PFNIEMAIMPLMEDIAOPTF2U256IMM8 pfnU256;
2121} IEMOPMEDIAOPTF2IMM8;
2122/** Pointer to a media operation function table for 2 full sized ops (AVX). */
2123typedef IEMOPMEDIAOPTF2IMM8 const *PCIEMOPMEDIAOPTF2IMM8;
2124
2125/** @def IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX
2126 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2127 * given functions as initializers. For use in AVX functions where a pair of
2128 * functions are only used once and the function table need not be public. */
2129#ifndef TST_IEM_CHECK_MC
2130# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2131# define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2132 static IEMOPMEDIAOPTF2IMM8 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2133 static IEMOPMEDIAOPTF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2134# else
2135# define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2136 static IEMOPMEDIAOPTF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2137# endif
2138#else
2139# define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2140#endif
2141/** @def IEMOPMEDIAOPTF2IMM8_INIT_VARS
2142 * Generate AVX function tables for the @a a_InstrNm instruction.
2143 * @sa IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX */
2144#define IEMOPMEDIAOPTF2IMM8_INIT_VARS(a_InstrNm) \
2145 IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u256),\
2146 RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u256_fallback))
2147
2148/**
2149 * Function table for a media instruction taking two full sized media source
2150 * registers and one full sized destination register and an 8-bit immediate, but no additional state
2151 * (AVX).
2152 */
2153typedef struct IEMOPMEDIAOPTF3IMM8
2154{
2155 PFNIEMAIMPLMEDIAOPTF3U128IMM8 pfnU128;
2156 PFNIEMAIMPLMEDIAOPTF3U256IMM8 pfnU256;
2157} IEMOPMEDIAOPTF3IMM8;
2158/** Pointer to a media operation function table for 3 full sized ops (AVX). */
2159typedef IEMOPMEDIAOPTF3IMM8 const *PCIEMOPMEDIAOPTF3IMM8;
2160
2161/** @def IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX
2162 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2163 * given functions as initializers. For use in AVX functions where a pair of
2164 * functions are only used once and the function table need not be public. */
2165#ifndef TST_IEM_CHECK_MC
2166# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2167# define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2168 static IEMOPMEDIAOPTF3IMM8 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2169 static IEMOPMEDIAOPTF3IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2170# else
2171# define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2172 static IEMOPMEDIAOPTF3IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2173# endif
2174#else
2175# define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2176#endif
2177/** @def IEMOPMEDIAOPTF3IMM8_INIT_VARS
2178 * Generate AVX function tables for the @a a_InstrNm instruction.
2179 * @sa IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX */
2180#define IEMOPMEDIAOPTF3IMM8_INIT_VARS(a_InstrNm) \
2181 IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2182 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2183/** @} */
2184
2185
2186/**
2187 * Function table for a blend type instruction taking three full sized media source
2188 * registers and one full sized destination register, but no additional state
2189 * (AVX).
2190 */
2191typedef struct IEMOPBLENDOP
2192{
2193 PFNIEMAIMPLAVXBLENDU128 pfnU128;
2194 PFNIEMAIMPLAVXBLENDU256 pfnU256;
2195} IEMOPBLENDOP;
2196/** Pointer to a media operation function table for 4 full sized ops (AVX). */
2197typedef IEMOPBLENDOP const *PCIEMOPBLENDOP;
2198
2199/** @def IEMOPBLENDOP_INIT_VARS_EX
2200 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2201 * given functions as initializers. For use in AVX functions where a pair of
2202 * functions are only used once and the function table need not be public. */
2203#ifndef TST_IEM_CHECK_MC
2204# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2205# define IEMOPBLENDOP_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2206 static IEMOPBLENDOP const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2207 static IEMOPBLENDOP const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2208# else
2209# define IEMOPBLENDOP_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2210 static IEMOPBLENDOP const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2211# endif
2212#else
2213# define IEMOPBLENDOP_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2214#endif
2215/** @def IEMOPBLENDOP_INIT_VARS
2216 * Generate AVX function tables for the @a a_InstrNm instruction.
2217 * @sa IEMOPBLENDOP_INIT_VARS_EX */
2218#define IEMOPBLENDOP_INIT_VARS(a_InstrNm) \
2219 IEMOPBLENDOP_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2220 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2221
2222
2223/** @name SSE/AVX single/double precision floating point operations.
2224 * @{ */
2225typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
2226typedef FNIEMAIMPLFPSSEF2U128 *PFNIEMAIMPLFPSSEF2U128;
2227typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128R32,(uint32_t uMxCsrIn, PX86XMMREG Result, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2));
2228typedef FNIEMAIMPLFPSSEF2U128R32 *PFNIEMAIMPLFPSSEF2U128R32;
2229typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128R64,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2));
2230typedef FNIEMAIMPLFPSSEF2U128R64 *PFNIEMAIMPLFPSSEF2U128R64;
2231
2232typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
2233typedef FNIEMAIMPLFPAVXF3U128 *PFNIEMAIMPLFPAVXF3U128;
2234typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128R32,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2));
2235typedef FNIEMAIMPLFPAVXF3U128R32 *PFNIEMAIMPLFPAVXF3U128R32;
2236typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128R64,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2));
2237typedef FNIEMAIMPLFPAVXF3U128R64 *PFNIEMAIMPLFPAVXF3U128R64;
2238
2239typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U256,(uint32_t uMxCsrIn, PX86YMMREG pResult, PCX86YMMREG puSrc1, PCX86YMMREG puSrc2));
2240typedef FNIEMAIMPLFPAVXF3U256 *PFNIEMAIMPLFPAVXF3U256;
2241
2242FNIEMAIMPLFPSSEF2U128 iemAImpl_addps_u128;
2243FNIEMAIMPLFPSSEF2U128 iemAImpl_addpd_u128;
2244FNIEMAIMPLFPSSEF2U128 iemAImpl_mulps_u128;
2245FNIEMAIMPLFPSSEF2U128 iemAImpl_mulpd_u128;
2246FNIEMAIMPLFPSSEF2U128 iemAImpl_subps_u128;
2247FNIEMAIMPLFPSSEF2U128 iemAImpl_subpd_u128;
2248FNIEMAIMPLFPSSEF2U128 iemAImpl_minps_u128;
2249FNIEMAIMPLFPSSEF2U128 iemAImpl_minpd_u128;
2250FNIEMAIMPLFPSSEF2U128 iemAImpl_divps_u128;
2251FNIEMAIMPLFPSSEF2U128 iemAImpl_divpd_u128;
2252FNIEMAIMPLFPSSEF2U128 iemAImpl_maxps_u128;
2253FNIEMAIMPLFPSSEF2U128 iemAImpl_maxpd_u128;
2254FNIEMAIMPLFPSSEF2U128 iemAImpl_haddps_u128;
2255FNIEMAIMPLFPSSEF2U128 iemAImpl_haddpd_u128;
2256FNIEMAIMPLFPSSEF2U128 iemAImpl_hsubps_u128;
2257FNIEMAIMPLFPSSEF2U128 iemAImpl_hsubpd_u128;
2258FNIEMAIMPLFPSSEF2U128 iemAImpl_sqrtps_u128;
2259FNIEMAIMPLFPSSEF2U128 iemAImpl_rsqrtps_u128;
2260FNIEMAIMPLFPSSEF2U128 iemAImpl_sqrtpd_u128;
2261FNIEMAIMPLFPSSEF2U128 iemAImpl_rcpps_u128;
2262FNIEMAIMPLFPSSEF2U128 iemAImpl_addsubps_u128;
2263FNIEMAIMPLFPSSEF2U128 iemAImpl_addsubpd_u128;
2264
2265FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtpd2ps_u128;
2266IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_cvtps2pd_u128,(uint32_t uMxCsrIn, PX86XMMREG pResult, uint64_t const *pu64Src));
2267
2268FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtdq2ps_u128;
2269FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtps2dq_u128;
2270FNIEMAIMPLFPSSEF2U128 iemAImpl_cvttps2dq_u128;
2271FNIEMAIMPLFPSSEF2U128 iemAImpl_cvttpd2dq_u128;
2272FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtdq2pd_u128;
2273FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtpd2dq_u128;
2274
2275FNIEMAIMPLFPSSEF2U128R32 iemAImpl_addss_u128_r32;
2276FNIEMAIMPLFPSSEF2U128R64 iemAImpl_addsd_u128_r64;
2277FNIEMAIMPLFPSSEF2U128R32 iemAImpl_mulss_u128_r32;
2278FNIEMAIMPLFPSSEF2U128R64 iemAImpl_mulsd_u128_r64;
2279FNIEMAIMPLFPSSEF2U128R32 iemAImpl_subss_u128_r32;
2280FNIEMAIMPLFPSSEF2U128R64 iemAImpl_subsd_u128_r64;
2281FNIEMAIMPLFPSSEF2U128R32 iemAImpl_minss_u128_r32;
2282FNIEMAIMPLFPSSEF2U128R64 iemAImpl_minsd_u128_r64;
2283FNIEMAIMPLFPSSEF2U128R32 iemAImpl_divss_u128_r32;
2284FNIEMAIMPLFPSSEF2U128R64 iemAImpl_divsd_u128_r64;
2285FNIEMAIMPLFPSSEF2U128R32 iemAImpl_maxss_u128_r32;
2286FNIEMAIMPLFPSSEF2U128R64 iemAImpl_maxsd_u128_r64;
2287FNIEMAIMPLFPSSEF2U128R32 iemAImpl_cvtss2sd_u128_r32;
2288FNIEMAIMPLFPSSEF2U128R64 iemAImpl_cvtsd2ss_u128_r64;
2289FNIEMAIMPLFPSSEF2U128R32 iemAImpl_sqrtss_u128_r32;
2290FNIEMAIMPLFPSSEF2U128R64 iemAImpl_sqrtsd_u128_r64;
2291FNIEMAIMPLFPSSEF2U128R32 iemAImpl_rsqrtss_u128_r32;
2292FNIEMAIMPLFPSSEF2U128R32 iemAImpl_rcpss_u128_r32;
2293
2294FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddps_u128, iemAImpl_vaddps_u128_fallback;
2295FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddpd_u128, iemAImpl_vaddpd_u128_fallback;
2296FNIEMAIMPLMEDIAF3U128 iemAImpl_vmulps_u128, iemAImpl_vmulps_u128_fallback;
2297FNIEMAIMPLMEDIAF3U128 iemAImpl_vmulpd_u128, iemAImpl_vmulpd_u128_fallback;
2298FNIEMAIMPLMEDIAF3U128 iemAImpl_vsubps_u128, iemAImpl_vsubps_u128_fallback;
2299FNIEMAIMPLMEDIAF3U128 iemAImpl_vsubpd_u128, iemAImpl_vsubpd_u128_fallback;
2300FNIEMAIMPLMEDIAF3U128 iemAImpl_vminps_u128, iemAImpl_vminps_u128_fallback;
2301FNIEMAIMPLMEDIAF3U128 iemAImpl_vminpd_u128, iemAImpl_vminpd_u128_fallback;
2302FNIEMAIMPLMEDIAF3U128 iemAImpl_vdivps_u128, iemAImpl_vdivps_u128_fallback;
2303FNIEMAIMPLMEDIAF3U128 iemAImpl_vdivpd_u128, iemAImpl_vdivpd_u128_fallback;
2304FNIEMAIMPLMEDIAF3U128 iemAImpl_vmaxps_u128, iemAImpl_vmaxps_u128_fallback;
2305FNIEMAIMPLMEDIAF3U128 iemAImpl_vmaxpd_u128, iemAImpl_vmaxpd_u128_fallback;
2306FNIEMAIMPLMEDIAF3U128 iemAImpl_vhaddps_u128, iemAImpl_vhaddps_u128_fallback;
2307FNIEMAIMPLMEDIAF3U128 iemAImpl_vhaddpd_u128, iemAImpl_vhaddpd_u128_fallback;
2308FNIEMAIMPLMEDIAF3U128 iemAImpl_vhsubps_u128, iemAImpl_vhsubps_u128_fallback;
2309FNIEMAIMPLMEDIAF3U128 iemAImpl_vhsubpd_u128, iemAImpl_vhsubpd_u128_fallback;
2310FNIEMAIMPLMEDIAF2U128 iemAImpl_vsqrtps_u128, iemAImpl_vsqrtps_u128_fallback;
2311FNIEMAIMPLMEDIAF2U128 iemAImpl_vsqrtpd_u128, iemAImpl_vsqrtpd_u128_fallback;
2312FNIEMAIMPLMEDIAF2U128 iemAImpl_vrsqrtps_u128, iemAImpl_vrsqrtps_u128_fallback;
2313FNIEMAIMPLMEDIAF2U128 iemAImpl_vrcpps_u128, iemAImpl_vrcpps_u128_fallback;
2314FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddsubps_u128, iemAImpl_vaddsubps_u128_fallback;
2315FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddsubpd_u128, iemAImpl_vaddsubpd_u128_fallback;
2316FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvtdq2ps_u128, iemAImpl_vcvtdq2ps_u128_fallback;
2317FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvtps2dq_u128, iemAImpl_vcvtps2dq_u128_fallback;
2318FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvttps2dq_u128, iemAImpl_vcvttps2dq_u128_fallback;
2319IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2320IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2321IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2322IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2323IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2324IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2325
2326
2327FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback;
2328FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback;
2329FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback;
2330FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback;
2331FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback;
2332FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback;
2333FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback;
2334FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback;
2335FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback;
2336FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback;
2337FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback;
2338FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback;
2339FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback;
2340FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback;
2341FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback;
2342FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vrcpss_u128_r32, iemAImpl_vrcpss_u128_r32_fallback;
2343FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vcvtss2sd_u128_r32, iemAImpl_vcvtss2sd_u128_r32_fallback;
2344FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vcvtsd2ss_u128_r64, iemAImpl_vcvtsd2ss_u128_r64_fallback;
2345
2346
2347FNIEMAIMPLFPAVXF3U256 iemAImpl_vaddps_u256, iemAImpl_vaddps_u256_fallback;
2348FNIEMAIMPLFPAVXF3U256 iemAImpl_vaddpd_u256, iemAImpl_vaddpd_u256_fallback;
2349FNIEMAIMPLFPAVXF3U256 iemAImpl_vmulps_u256, iemAImpl_vmulps_u256_fallback;
2350FNIEMAIMPLFPAVXF3U256 iemAImpl_vmulpd_u256, iemAImpl_vmulpd_u256_fallback;
2351FNIEMAIMPLFPAVXF3U256 iemAImpl_vsubps_u256, iemAImpl_vsubps_u256_fallback;
2352FNIEMAIMPLFPAVXF3U256 iemAImpl_vsubpd_u256, iemAImpl_vsubpd_u256_fallback;
2353FNIEMAIMPLFPAVXF3U256 iemAImpl_vminps_u256, iemAImpl_vminps_u256_fallback;
2354FNIEMAIMPLFPAVXF3U256 iemAImpl_vminpd_u256, iemAImpl_vminpd_u256_fallback;
2355FNIEMAIMPLFPAVXF3U256 iemAImpl_vdivps_u256, iemAImpl_vdivps_u256_fallback;
2356FNIEMAIMPLFPAVXF3U256 iemAImpl_vdivpd_u256, iemAImpl_vdivpd_u256_fallback;
2357FNIEMAIMPLFPAVXF3U256 iemAImpl_vmaxps_u256, iemAImpl_vmaxps_u256_fallback;
2358FNIEMAIMPLFPAVXF3U256 iemAImpl_vmaxpd_u256, iemAImpl_vmaxpd_u256_fallback;
2359FNIEMAIMPLFPAVXF3U256 iemAImpl_vhaddps_u256, iemAImpl_vhaddps_u256_fallback;
2360FNIEMAIMPLFPAVXF3U256 iemAImpl_vhaddpd_u256, iemAImpl_vhaddpd_u256_fallback;
2361FNIEMAIMPLFPAVXF3U256 iemAImpl_vhsubps_u256, iemAImpl_vhsubps_u256_fallback;
2362FNIEMAIMPLFPAVXF3U256 iemAImpl_vhsubpd_u256, iemAImpl_vhsubpd_u256_fallback;
2363FNIEMAIMPLMEDIAF3U256 iemAImpl_vaddsubps_u256, iemAImpl_vaddsubps_u256_fallback;
2364FNIEMAIMPLMEDIAF3U256 iemAImpl_vaddsubpd_u256, iemAImpl_vaddsubpd_u256_fallback;
2365FNIEMAIMPLMEDIAF2U256 iemAImpl_vsqrtps_u256, iemAImpl_vsqrtps_u256_fallback;
2366FNIEMAIMPLMEDIAF2U256 iemAImpl_vsqrtpd_u256, iemAImpl_vsqrtpd_u256_fallback;
2367FNIEMAIMPLMEDIAF2U256 iemAImpl_vrsqrtps_u256, iemAImpl_vrsqrtps_u256_fallback;
2368FNIEMAIMPLMEDIAF2U256 iemAImpl_vrcpps_u256, iemAImpl_vrcpps_u256_fallback;
2369FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvtdq2ps_u256, iemAImpl_vcvtdq2ps_u256_fallback;
2370FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvtps2dq_u256, iemAImpl_vcvtps2dq_u256_fallback;
2371FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvttps2dq_u256, iemAImpl_vcvttps2dq_u256_fallback;
2372IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2373IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2374IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2375IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2376IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2377IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2378/** @} */
2379
2380
2381/** @name Misc Helpers
2382 * @{ */
2383
2384/**
2385 * Gets the CPU mode (from fExec) as a IEMMODE value.
2386 *
2387 * @returns IEMMODE
2388 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2389 */
2390#define IEM_GET_CPU_MODE(a_pVCpu) ((a_pVCpu)->iem.s.fExec & IEM_F_MODE_CPUMODE_MASK)
2391
2392/**
2393 * Check if we're currently executing in real or virtual 8086 mode.
2394 *
2395 * @returns @c true if it is, @c false if not.
2396 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2397 */
2398#define IEM_IS_REAL_OR_V86_MODE(a_pVCpu) (( ((a_pVCpu)->iem.s.fExec ^ IEM_F_MODE_X86_PROT_MASK) \
2399 & (IEM_F_MODE_X86_V86_MASK | IEM_F_MODE_X86_PROT_MASK)) != 0)
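/*
 * Editor's note (illustrative): the XOR flips the protected-mode bit, so the
 * masked result is non-zero exactly when the guest is not in protected mode
 * or has the V86 bit set.  Assuming single-bit masks:
 *
 *      real mode:       PROT=0, V86=0  ->  flipped PROT bit survives the mask -> true
 *      virtual 8086:    PROT=1, V86=1  ->  V86 bit survives the mask          -> true
 *      protected mode:  PROT=1, V86=0  ->  nothing survives the mask          -> false
 */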
2400
2401/**
2402 * Check if we're currently executing in virtual 8086 mode.
2403 *
2404 * @returns @c true if it is, @c false if not.
2405 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2406 */
2407#define IEM_IS_V86_MODE(a_pVCpu) (((a_pVCpu)->iem.s.fExec & IEM_F_MODE_X86_V86_MASK) != 0)
2408
2409/**
2410 * Check if we're currently executing in long mode.
2411 *
2412 * @returns @c true if it is, @c false if not.
2413 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2414 */
2415#define IEM_IS_LONG_MODE(a_pVCpu) (CPUMIsGuestInLongModeEx(IEM_GET_CTX(a_pVCpu)))
2416
2417/**
2418 * Check if we're currently executing in a 16-bit code segment.
2419 *
2420 * @returns @c true if it is, @c false if not.
2421 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2422 */
2423#define IEM_IS_16BIT_CODE(a_pVCpu) (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_16BIT)
2424
2425/**
2426 * Check if we're currently executing in a 32-bit code segment.
2427 *
2428 * @returns @c true if it is, @c false if not.
2429 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2430 */
2431#define IEM_IS_32BIT_CODE(a_pVCpu) (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_32BIT)
2432
2433/**
2434 * Check if we're currently executing in a 64-bit code segment.
2435 *
2436 * @returns @c true if it is, @c false if not.
2437 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2438 */
2439#define IEM_IS_64BIT_CODE(a_pVCpu) (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_64BIT)
2440
2441/**
2442 * Check if we're currently executing in real mode.
2443 *
2444 * @returns @c true if it is, @c false if not.
2445 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2446 */
2447#define IEM_IS_REAL_MODE(a_pVCpu) (!((a_pVCpu)->iem.s.fExec & IEM_F_MODE_X86_PROT_MASK))
2448
2449/**
2450 * Gets the current protection level (CPL).
2451 *
2452 * @returns 0..3
2453 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2454 */
2455#define IEM_GET_CPL(a_pVCpu) (((a_pVCpu)->iem.s.fExec >> IEM_F_X86_CPL_SHIFT) & IEM_F_X86_CPL_SMASK)
2456
2457/**
2458 * Sets the current protection level (CPL).
2459 *
2460 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2461 */
2462#define IEM_SET_CPL(a_pVCpu, a_uCpl) \
2463 do { (a_pVCpu)->iem.s.fExec = ((a_pVCpu)->iem.s.fExec & ~IEM_F_X86_CPL_MASK) | ((a_uCpl) << IEM_F_X86_CPL_SHIFT); } while (0)
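/*
 * Worked example (illustrative): the CPL is kept as a bitfield inside fExec,
 * so the two macros are simple shift/mask operations:
 *
 *      uint8_t const uOldCpl = IEM_GET_CPL(pVCpu);    // 0..3
 *      IEM_SET_CPL(pVCpu, 3);                         // clears the CPL field, ORs in 3
 *      Assert(IEM_GET_CPL(pVCpu) == 3);
 */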
2464
2465/**
2466 * Returns a (const) pointer to the CPUMFEATURES for the guest CPU.
2467 * @returns PCCPUMFEATURES
2468 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2469 */
2470#define IEM_GET_GUEST_CPU_FEATURES(a_pVCpu) (&((a_pVCpu)->CTX_SUFF(pVM)->cpum.ro.GuestFeatures))
2471
2472/**
2473 * Returns a (const) pointer to the CPUMFEATURES for the host CPU.
2474 * @returns PCCPUMFEATURES
2475 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2476 */
2477#define IEM_GET_HOST_CPU_FEATURES(a_pVCpu) (&g_CpumHostFeatures.s)
2478
2479/**
2480 * Evaluates to true if we're presenting an Intel CPU to the guest.
2481 */
2482#define IEM_IS_GUEST_CPU_INTEL(a_pVCpu) ( (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL )
2483
2484/**
2485 * Evaluates to true if we're presenting an AMD CPU to the guest.
2486 */
2487#define IEM_IS_GUEST_CPU_AMD(a_pVCpu) ( (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_AMD || (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_HYGON )
2488
2489/**
2490 * Check if the address is canonical.
2491 */
2492#define IEM_IS_CANONICAL(a_u64Addr) X86_IS_CANONICAL(a_u64Addr)
2493
2494/** Checks if the ModR/M byte is in register mode or not. */
2495#define IEM_IS_MODRM_REG_MODE(a_bRm) ( ((a_bRm) & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
2496/** Checks if the ModR/M byte is in memory mode or not. */
2497#define IEM_IS_MODRM_MEM_MODE(a_bRm) ( ((a_bRm) & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT) )
2498
2499/**
2500 * Gets the register (reg) part of a ModR/M encoding, with REX.R added in.
2501 *
2502 * For use during decoding.
2503 */
2504#define IEM_GET_MODRM_REG(a_pVCpu, a_bRm) ( (((a_bRm) >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | (a_pVCpu)->iem.s.uRexReg )
2505/**
2506 * Gets the r/m part of a ModR/M encoding as a register index, with REX.B added in.
2507 *
2508 * For use during decoding.
2509 */
2510#define IEM_GET_MODRM_RM(a_pVCpu, a_bRm) ( ((a_bRm) & X86_MODRM_RM_MASK) | (a_pVCpu)->iem.s.uRexB )
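/*
 * Worked example (illustrative): with ModR/M byte 0xd9 (mod=3, reg=3, r/m=1)
 * and a preceding REX prefix 0x44 (REX.R=1, REX.B=0) in 64-bit code,
 * IEM_GET_MODRM_REG() yields 3 | 8 = 11 (r11) and IEM_GET_MODRM_RM() yields 1
 * (rcx), since the decoder stores uRexReg/uRexB pre-shifted to bit 3.
 */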
2511
2512/**
2513 * Gets the register (reg) part of a ModR/M encoding, without REX.R.
2514 *
2515 * For use during decoding.
2516 */
2517#define IEM_GET_MODRM_REG_8(a_bRm) ( (((a_bRm) >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) )
2518/**
2519 * Gets the r/m part of a ModR/M encoding as a register index, without REX.B.
2520 *
2521 * For use during decoding.
2522 */
2523#define IEM_GET_MODRM_RM_8(a_bRm) ( ((a_bRm) & X86_MODRM_RM_MASK) )
2524
2525/**
2526 * Gets the register (reg) part of a ModR/M encoding as an extended 8-bit
2527 * register index, with REX.R added in.
2528 *
2529 * For use during decoding.
2530 *
2531 * @see iemGRegRefU8Ex, iemGRegFetchU8Ex, iemGRegStoreU8Ex
2532 */
2533#define IEM_GET_MODRM_REG_EX8(a_pVCpu, a_bRm) \
2534 ( (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX) \
2535 || !((a_bRm) & (4 << X86_MODRM_REG_SHIFT)) /* IEM_GET_MODRM_REG(pVCpu, a_bRm) < 4 */ \
2536 ? IEM_GET_MODRM_REG(pVCpu, a_bRm) : (((a_bRm) >> X86_MODRM_REG_SHIFT) & 3) | 16)
2537/**
2538 * Gets the r/m part of a ModR/M encoding as an extended 8-bit register index,
2539 * with REX.B added in.
2540 *
2541 * For use during decoding.
2542 *
2543 * @see iemGRegRefU8Ex, iemGRegFetchU8Ex, iemGRegStoreU8Ex
2544 */
2545#define IEM_GET_MODRM_RM_EX8(a_pVCpu, a_bRm) \
2546 ( (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX) \
2547 || !((a_bRm) & 4) /* IEM_GET_MODRM_RM(pVCpu, a_bRm) < 4 */ \
2548 ? IEM_GET_MODRM_RM(pVCpu, a_bRm) : ((a_bRm) & 3) | 16)
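/*
 * Background (illustrative): without any REX prefix the 8-bit register
 * encodings 4..7 select AH/CH/DH/BH rather than SPL/BPL/SIL/DIL, so these
 * _EX8 variants remap them to the extended indices 16..19 which the
 * iemGRegRefU8Ex/iemGRegFetchU8Ex/iemGRegStoreU8Ex helpers resolve to the
 * legacy high-byte registers.  With a REX prefix the plain REG/RM value is
 * used and 4..7 mean SPL/BPL/SIL/DIL as usual.
 */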
2549
2550/**
2551 * Combines the prefix REX and ModR/M byte for passing to
2552 * iemOpHlpCalcRmEffAddrThreadedAddr64().
2553 *
2554 * @returns The ModRM byte but with bit 3 set to REX.B and bit 4 to REX.X.
2555 * The two bits are part of the REG sub-field, which isn't needed in
2556 * iemOpHlpCalcRmEffAddrThreadedAddr64().
2557 *
2558 * For use during decoding/recompiling.
2559 */
2560#define IEM_GET_MODRM_EX(a_pVCpu, a_bRm) \
2561 ( ((a_bRm) & ~X86_MODRM_REG_MASK) \
2562 | (uint8_t)( (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X)) >> (25 - 3) ) )
2563AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25));
2564AssertCompile(IEM_OP_PRF_REX_X == RT_BIT_32(26));
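/*
 * Arithmetic note: IEM_OP_PRF_REX_B is bit 25 and IEM_OP_PRF_REX_X is bit 26,
 * so shifting the prefix flags right by 25 - 3 = 22 lands REX.B in bit 3 and
 * REX.X in bit 4 of the result, which is what
 * iemOpHlpCalcRmEffAddrThreadedAddr64() expects; the AssertCompile statements
 * above guard exactly this relationship.
 */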
2565
2566/**
2567 * Gets the effective VEX.VVVV value.
2568 *
2569 * The 4th bit is ignored if not 64-bit code.
2570 * @returns effective V-register value.
2571 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2572 */
2573#define IEM_GET_EFFECTIVE_VVVV(a_pVCpu) \
2574 (IEM_IS_64BIT_CODE(a_pVCpu) ? (a_pVCpu)->iem.s.uVex3rdReg : (a_pVCpu)->iem.s.uVex3rdReg & 7)
2575
2576
2577/**
2578 * Gets the register (reg) part of the special 4th register byte used by
2579 * vblendvps and vblendvpd.
2580 *
2581 * For use during decoding.
2582 */
2583#define IEM_GET_IMM8_REG(a_pVCpu, a_bRegImm8) \
2584 (IEM_IS_64BIT_CODE(a_pVCpu) ? (a_bRegImm8) >> 4 : ((a_bRegImm8) >> 4) & 7)
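/*
 * Worked example (illustrative): vblendvps/vblendvpd carry the fourth operand
 * register in the high nibble of the trailing immediate byte, so an imm8 of
 * 0x50 selects register 5 (xmm5/ymm5); outside 64-bit code the top bit of the
 * nibble is masked off, limiting the selection to registers 0..7.
 */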
2585
2586
2587/**
2588 * Checks if we're executing inside an AMD-V or VT-x guest.
2589 */
2590#if defined(VBOX_WITH_NESTED_HWVIRT_VMX) || defined(VBOX_WITH_NESTED_HWVIRT_SVM)
2591# define IEM_IS_IN_GUEST(a_pVCpu) RT_BOOL((a_pVCpu)->iem.s.fExec & IEM_F_X86_CTX_IN_GUEST)
2592#else
2593# define IEM_IS_IN_GUEST(a_pVCpu) false
2594#endif
2595
2596
2597#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
2598
2599/**
2600 * Check if the guest has entered VMX root operation.
2601 */
2602# define IEM_VMX_IS_ROOT_MODE(a_pVCpu) (CPUMIsGuestInVmxRootMode(IEM_GET_CTX(a_pVCpu)))
2603
2604/**
2605 * Check if the guest has entered VMX non-root operation.
2606 */
2607# define IEM_VMX_IS_NON_ROOT_MODE(a_pVCpu) ( ((a_pVCpu)->iem.s.fExec & (IEM_F_X86_CTX_VMX | IEM_F_X86_CTX_IN_GUEST)) \
2608 == (IEM_F_X86_CTX_VMX | IEM_F_X86_CTX_IN_GUEST) )
2609
2610/**
2611 * Check if the nested-guest has the given Pin-based VM-execution control set.
2612 */
2613# define IEM_VMX_IS_PINCTLS_SET(a_pVCpu, a_PinCtl) (CPUMIsGuestVmxPinCtlsSet(IEM_GET_CTX(a_pVCpu), (a_PinCtl)))
2614
2615/**
2616 * Check if the nested-guest has the given Processor-based VM-execution control set.
2617 */
2618# define IEM_VMX_IS_PROCCTLS_SET(a_pVCpu, a_ProcCtl) (CPUMIsGuestVmxProcCtlsSet(IEM_GET_CTX(a_pVCpu), (a_ProcCtl)))
2619
2620/**
2621 * Check if the nested-guest has the given Secondary Processor-based VM-execution
2622 * control set.
2623 */
2624# define IEM_VMX_IS_PROCCTLS2_SET(a_pVCpu, a_ProcCtl2) (CPUMIsGuestVmxProcCtls2Set(IEM_GET_CTX(a_pVCpu), (a_ProcCtl2)))
2625
2626/** Gets the guest-physical address of the shadow VMCS for the given VCPU. */
2627# define IEM_VMX_GET_SHADOW_VMCS(a_pVCpu) ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysShadowVmcs)
2628
2629/** Whether a shadow VMCS is present for the given VCPU. */
2630# define IEM_VMX_HAS_SHADOW_VMCS(a_pVCpu) RT_BOOL(IEM_VMX_GET_SHADOW_VMCS(a_pVCpu) != NIL_RTGCPHYS)
2631
2632/** Gets the VMXON region pointer. */
2633# define IEM_VMX_GET_VMXON_PTR(a_pVCpu) ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmxon)
2634
2635/** Gets the guest-physical address of the current VMCS for the given VCPU. */
2636# define IEM_VMX_GET_CURRENT_VMCS(a_pVCpu) ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs)
2637
2638/** Whether a current VMCS is present for the given VCPU. */
2639# define IEM_VMX_HAS_CURRENT_VMCS(a_pVCpu) RT_BOOL(IEM_VMX_GET_CURRENT_VMCS(a_pVCpu) != NIL_RTGCPHYS)
2640
2641/** Assigns the guest-physical address of the current VMCS for the given VCPU. */
2642# define IEM_VMX_SET_CURRENT_VMCS(a_pVCpu, a_GCPhysVmcs) \
2643 do \
2644 { \
2645 Assert((a_GCPhysVmcs) != NIL_RTGCPHYS); \
2646 (a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs = (a_GCPhysVmcs); \
2647 } while (0)
2648
2649/** Clears any current VMCS for the given VCPU. */
2650# define IEM_VMX_CLEAR_CURRENT_VMCS(a_pVCpu) \
2651 do \
2652 { \
2653 (a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs = NIL_RTGCPHYS; \
2654 } while (0)
2655
2656/**
2657 * Invokes the VMX VM-exit handler for an instruction intercept.
2658 */
2659# define IEM_VMX_VMEXIT_INSTR_RET(a_pVCpu, a_uExitReason, a_cbInstr) \
2660 do { return iemVmxVmexitInstr((a_pVCpu), (a_uExitReason), (a_cbInstr)); } while (0)
2661
2662/**
2663 * Invokes the VMX VM-exit handler for an instruction intercept where the
2664 * instruction provides additional VM-exit information.
2665 */
2666# define IEM_VMX_VMEXIT_INSTR_NEEDS_INFO_RET(a_pVCpu, a_uExitReason, a_uInstrId, a_cbInstr) \
2667 do { return iemVmxVmexitInstrNeedsInfo((a_pVCpu), (a_uExitReason), (a_uInstrId), (a_cbInstr)); } while (0)
2668
2669/**
2670 * Invokes the VMX VM-exit handler for a task switch.
2671 */
2672# define IEM_VMX_VMEXIT_TASK_SWITCH_RET(a_pVCpu, a_enmTaskSwitch, a_SelNewTss, a_cbInstr) \
2673 do { return iemVmxVmexitTaskSwitch((a_pVCpu), (a_enmTaskSwitch), (a_SelNewTss), (a_cbInstr)); } while (0)
2674
2675/**
2676 * Invokes the VMX VM-exit handler for MWAIT.
2677 */
2678# define IEM_VMX_VMEXIT_MWAIT_RET(a_pVCpu, a_fMonitorArmed, a_cbInstr) \
2679 do { return iemVmxVmexitInstrMwait((a_pVCpu), (a_fMonitorArmed), (a_cbInstr)); } while (0)
2680
2681/**
2682 * Invokes the VMX VM-exit handler for EPT faults.
2683 */
2684# define IEM_VMX_VMEXIT_EPT_RET(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr) \
2685 do { return iemVmxVmexitEpt(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr); } while (0)
2686
2687/**
2688 * Invokes the VMX VM-exit handler.
2689 */
2690# define IEM_VMX_VMEXIT_TRIPLE_FAULT_RET(a_pVCpu, a_uExitReason, a_uExitQual) \
2691 do { return iemVmxVmexit((a_pVCpu), (a_uExitReason), (a_uExitQual)); } while (0)
2692
2693#else
2694# define IEM_VMX_IS_ROOT_MODE(a_pVCpu) (false)
2695# define IEM_VMX_IS_NON_ROOT_MODE(a_pVCpu) (false)
2696# define IEM_VMX_IS_PINCTLS_SET(a_pVCpu, a_cbInstr) (false)
2697# define IEM_VMX_IS_PROCCTLS_SET(a_pVCpu, a_cbInstr) (false)
2698# define IEM_VMX_IS_PROCCTLS2_SET(a_pVCpu, a_cbInstr) (false)
2699# define IEM_VMX_VMEXIT_INSTR_RET(a_pVCpu, a_uExitReason, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
2700# define IEM_VMX_VMEXIT_INSTR_NEEDS_INFO_RET(a_pVCpu, a_uExitReason, a_uInstrId, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
2701# define IEM_VMX_VMEXIT_TASK_SWITCH_RET(a_pVCpu, a_enmTaskSwitch, a_SelNewTss, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
2702# define IEM_VMX_VMEXIT_MWAIT_RET(a_pVCpu, a_fMonitorArmed, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
2703# define IEM_VMX_VMEXIT_EPT_RET(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
2704# define IEM_VMX_VMEXIT_TRIPLE_FAULT_RET(a_pVCpu, a_uExitReason, a_uExitQual) do { return VERR_VMX_IPE_1; } while (0)
2705
2706#endif
2707
2708#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
2709/**
2710 * Checks if we're executing a guest using AMD-V.
2711 */
2712# define IEM_SVM_IS_IN_GUEST(a_pVCpu) ( (a_pVCpu->iem.s.fExec & (IEM_F_X86_CTX_SVM | IEM_F_X86_CTX_IN_GUEST)) \
2713 == (IEM_F_X86_CTX_SVM | IEM_F_X86_CTX_IN_GUEST))
2714/**
2715 * Check if an SVM control/instruction intercept is set.
2716 */
2717# define IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept) \
2718 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmCtrlInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_Intercept)))
2719
2720/**
2721 * Check if an SVM read CRx intercept is set.
2722 */
2723# define IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, a_uCr) \
2724 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmReadCRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uCr)))
2725
2726/**
2727 * Check if an SVM write CRx intercept is set.
2728 */
2729# define IEM_SVM_IS_WRITE_CR_INTERCEPT_SET(a_pVCpu, a_uCr) \
2730 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmWriteCRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uCr)))
2731
2732/**
2733 * Check if an SVM read DRx intercept is set.
2734 */
2735# define IEM_SVM_IS_READ_DR_INTERCEPT_SET(a_pVCpu, a_uDr) \
2736 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmReadDRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uDr)))
2737
2738/**
2739 * Check if an SVM write DRx intercept is set.
2740 */
2741# define IEM_SVM_IS_WRITE_DR_INTERCEPT_SET(a_pVCpu, a_uDr) \
2742 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmWriteDRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uDr)))
2743
2744/**
2745 * Check if an SVM exception intercept is set.
2746 */
2747# define IEM_SVM_IS_XCPT_INTERCEPT_SET(a_pVCpu, a_uVector) \
2748 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmXcptInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uVector)))
2749
2750/**
2751 * Invokes the SVM \#VMEXIT handler for the nested-guest.
2752 */
2753# define IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2) \
2754 do { return iemSvmVmexit((a_pVCpu), (a_uExitCode), (a_uExitInfo1), (a_uExitInfo2)); } while (0)
2755
2756/**
2757 * Invokes the 'MOV CRx' SVM \#VMEXIT handler after constructing the
2758 * corresponding decode assist information.
2759 */
2760# define IEM_SVM_CRX_VMEXIT_RET(a_pVCpu, a_uExitCode, a_enmAccessCrX, a_iGReg) \
2761 do \
2762 { \
2763 uint64_t uExitInfo1; \
2764 if ( IEM_GET_GUEST_CPU_FEATURES(a_pVCpu)->fSvmDecodeAssists \
2765 && (a_enmAccessCrX) == IEMACCESSCRX_MOV_CRX) \
2766 uExitInfo1 = SVM_EXIT1_MOV_CRX_MASK | ((a_iGReg) & 7); \
2767 else \
2768 uExitInfo1 = 0; \
2769 IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, uExitInfo1, 0); \
2770 } while (0)
2771
2772/** Checks and handles SVM nested-guest instruction intercept and updates
2773 * NRIP if needed.
2774 */
2775# define IEM_SVM_CHECK_INSTR_INTERCEPT(a_pVCpu, a_Intercept, a_uExitCode, a_uExitInfo1, a_uExitInfo2, a_cbInstr) \
2776 do \
2777 { \
2778 if (IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept)) \
2779 { \
2780 IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr); \
2781 IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2); \
2782 } \
2783 } while (0)
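/*
 * Usage sketch (illustrative; the intercept/exit-code values are hypothetical
 * examples, not taken from the instruction implementations): a CIMPL body
 * checks for an SVM intercept before doing its work, e.g.
 *
 *      IEM_SVM_CHECK_INSTR_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_WBINVD,
 *                                    SVM_EXIT_WBINVD, 0, 0, cbInstr);
 *
 * If the intercept is set, this updates NRIP (when NextRipSave is exposed to
 * the guest) and returns with a \#VMEXIT; otherwise execution continues.
 */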
2784
2785/** Checks and handles SVM nested-guest CR0 read intercept. */
2786# define IEM_SVM_CHECK_READ_CR0_INTERCEPT(a_pVCpu, a_uExitInfo1, a_uExitInfo2, a_cbInstr) \
2787 do \
2788 { \
2789 if (!IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, 0)) \
2790 { /* probably likely */ } \
2791 else \
2792 { \
2793 IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr); \
2794 IEM_SVM_VMEXIT_RET(a_pVCpu, SVM_EXIT_READ_CR0, a_uExitInfo1, a_uExitInfo2); \
2795 } \
2796 } while (0)
2797
2798/**
2799 * Updates the NextRIP (NRI) field in the nested-guest VMCB.
2800 */
2801# define IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr) \
2802 do { \
2803 if (IEM_GET_GUEST_CPU_FEATURES(a_pVCpu)->fSvmNextRipSave) \
2804 CPUMGuestSvmUpdateNRip(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_cbInstr)); \
2805 } while (0)
2806
2807#else
2808# define IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept) (false)
2809# define IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, a_uCr) (false)
2810# define IEM_SVM_IS_WRITE_CR_INTERCEPT_SET(a_pVCpu, a_uCr) (false)
2811# define IEM_SVM_IS_READ_DR_INTERCEPT_SET(a_pVCpu, a_uDr) (false)
2812# define IEM_SVM_IS_WRITE_DR_INTERCEPT_SET(a_pVCpu, a_uDr) (false)
2813# define IEM_SVM_IS_XCPT_INTERCEPT_SET(a_pVCpu, a_uVector) (false)
2814# define IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2) do { return VERR_SVM_IPE_1; } while (0)
2815# define IEM_SVM_CRX_VMEXIT_RET(a_pVCpu, a_uExitCode, a_enmAccessCrX, a_iGReg) do { return VERR_SVM_IPE_1; } while (0)
2816# define IEM_SVM_CHECK_INSTR_INTERCEPT(a_pVCpu, a_Intercept, a_uExitCode, \
2817 a_uExitInfo1, a_uExitInfo2, a_cbInstr) do { } while (0)
2818# define IEM_SVM_CHECK_READ_CR0_INTERCEPT(a_pVCpu, a_uExitInfo1, a_uExitInfo2, a_cbInstr) do { } while (0)
2819# define IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr) do { } while (0)
2820
2821#endif
2822
2823/** @} */
2824
2825
2826/**
2827 * Selector descriptor table entry as fetched by iemMemFetchSelDesc.
2828 */
2829typedef union IEMSELDESC
2830{
2831 /** The legacy view. */
2832 X86DESC Legacy;
2833 /** The long mode view. */
2834 X86DESC64 Long;
2835} IEMSELDESC;
2836/** Pointer to a selector descriptor table entry. */
2837typedef IEMSELDESC *PIEMSELDESC;
2838
2839/** @name Raising Exceptions.
2840 * @{ */
2841VBOXSTRICTRC iemTaskSwitch(PVMCPUCC pVCpu, IEMTASKSWITCH enmTaskSwitch, uint32_t uNextEip, uint32_t fFlags,
2842 uint16_t uErr, uint64_t uCr2, RTSEL SelTSS, PIEMSELDESC pNewDescTSS) RT_NOEXCEPT;
2843
2844VBOXSTRICTRC iemRaiseXcptOrInt(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector, uint32_t fFlags,
2845 uint16_t uErr, uint64_t uCr2) RT_NOEXCEPT;
2846#ifdef IEM_WITH_SETJMP
2847DECL_NO_RETURN(void) iemRaiseXcptOrIntJmp(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector,
2848 uint32_t fFlags, uint16_t uErr, uint64_t uCr2) IEM_NOEXCEPT_MAY_LONGJMP;
2849#endif
2850VBOXSTRICTRC iemRaiseDivideError(PVMCPUCC pVCpu) RT_NOEXCEPT;
2851#ifdef IEM_WITH_SETJMP
2852DECL_NO_RETURN(void) iemRaiseDivideErrorJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2853#endif
2854VBOXSTRICTRC iemRaiseDebugException(PVMCPUCC pVCpu) RT_NOEXCEPT;
2855VBOXSTRICTRC iemRaiseBoundRangeExceeded(PVMCPUCC pVCpu) RT_NOEXCEPT;
2856VBOXSTRICTRC iemRaiseUndefinedOpcode(PVMCPUCC pVCpu) RT_NOEXCEPT;
2857#ifdef IEM_WITH_SETJMP
2858DECL_NO_RETURN(void) iemRaiseUndefinedOpcodeJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2859#endif
2860VBOXSTRICTRC iemRaiseDeviceNotAvailable(PVMCPUCC pVCpu) RT_NOEXCEPT;
2861#ifdef IEM_WITH_SETJMP
2862DECL_NO_RETURN(void) iemRaiseDeviceNotAvailableJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2863#endif
2864VBOXSTRICTRC iemRaiseTaskSwitchFaultWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
2865VBOXSTRICTRC iemRaiseTaskSwitchFaultCurrentTSS(PVMCPUCC pVCpu) RT_NOEXCEPT;
2866VBOXSTRICTRC iemRaiseTaskSwitchFault0(PVMCPUCC pVCpu) RT_NOEXCEPT;
2867VBOXSTRICTRC iemRaiseTaskSwitchFaultBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
2868/*VBOXSTRICTRC iemRaiseSelectorNotPresent(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;*/
2869VBOXSTRICTRC iemRaiseSelectorNotPresentWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
2870VBOXSTRICTRC iemRaiseSelectorNotPresentBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
2871VBOXSTRICTRC iemRaiseStackSelectorNotPresentBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
2872VBOXSTRICTRC iemRaiseStackSelectorNotPresentWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
2873VBOXSTRICTRC iemRaiseGeneralProtectionFault(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
2874VBOXSTRICTRC iemRaiseGeneralProtectionFault0(PVMCPUCC pVCpu) RT_NOEXCEPT;
2875#ifdef IEM_WITH_SETJMP
2876DECL_NO_RETURN(void) iemRaiseGeneralProtectionFault0Jmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2877#endif
2878VBOXSTRICTRC iemRaiseGeneralProtectionFaultBySelector(PVMCPUCC pVCpu, RTSEL Sel) RT_NOEXCEPT;
2879VBOXSTRICTRC iemRaiseNotCanonical(PVMCPUCC pVCpu) RT_NOEXCEPT;
2880VBOXSTRICTRC iemRaiseSelectorBounds(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;
2881#ifdef IEM_WITH_SETJMP
2882DECL_NO_RETURN(void) iemRaiseSelectorBoundsJmp(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) IEM_NOEXCEPT_MAY_LONGJMP;
2883#endif
2884VBOXSTRICTRC iemRaiseSelectorBoundsBySelector(PVMCPUCC pVCpu, RTSEL Sel) RT_NOEXCEPT;
2885#ifdef IEM_WITH_SETJMP
2886DECL_NO_RETURN(void) iemRaiseSelectorBoundsBySelectorJmp(PVMCPUCC pVCpu, RTSEL Sel) IEM_NOEXCEPT_MAY_LONGJMP;
2887#endif
2888VBOXSTRICTRC iemRaiseSelectorInvalidAccess(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;
2889#ifdef IEM_WITH_SETJMP
2890DECL_NO_RETURN(void) iemRaiseSelectorInvalidAccessJmp(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) IEM_NOEXCEPT_MAY_LONGJMP;
2891#endif
2892VBOXSTRICTRC iemRaisePageFault(PVMCPUCC pVCpu, RTGCPTR GCPtrWhere, uint32_t cbAccess, uint32_t fAccess, int rc) RT_NOEXCEPT;
2893#ifdef IEM_WITH_SETJMP
2894DECL_NO_RETURN(void) iemRaisePageFaultJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrWhere, uint32_t cbAccess, uint32_t fAccess, int rc) IEM_NOEXCEPT_MAY_LONGJMP;
2895#endif
2896VBOXSTRICTRC iemRaiseMathFault(PVMCPUCC pVCpu) RT_NOEXCEPT;
2897#ifdef IEM_WITH_SETJMP
2898DECL_NO_RETURN(void) iemRaiseMathFaultJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2899#endif
2900VBOXSTRICTRC iemRaiseAlignmentCheckException(PVMCPUCC pVCpu) RT_NOEXCEPT;
2901#ifdef IEM_WITH_SETJMP
2902DECL_NO_RETURN(void) iemRaiseAlignmentCheckExceptionJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2903#endif
2904VBOXSTRICTRC iemRaiseSimdFpException(PVMCPUCC pVCpu) RT_NOEXCEPT;
2905#ifdef IEM_WITH_SETJMP
2906DECL_NO_RETURN(void) iemRaiseSimdFpExceptionJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2907#endif
2908
2909void iemLogSyscallRealModeInt(PVMCPUCC pVCpu, uint8_t u8Vector, uint8_t cbInstr);
2910void iemLogSyscallProtModeInt(PVMCPUCC pVCpu, uint8_t u8Vector, uint8_t cbInstr);
2911
2912IEM_CIMPL_DEF_0(iemCImplRaiseDivideError);
2913IEM_CIMPL_DEF_0(iemCImplRaiseInvalidLockPrefix);
2914IEM_CIMPL_DEF_0(iemCImplRaiseInvalidOpcode);
2915
2916/**
2917 * Macro for calling iemCImplRaiseDivideError().
2918 *
2919 * This is for things that will _always_ decode to an \#DE, taking the
2920 * recompiler into consideration and everything.
2921 *
2922 * @return Strict VBox status code.
2923 */
2924#define IEMOP_RAISE_DIVIDE_ERROR_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseDivideError)
2925
2926/**
2927 * Macro for calling iemCImplRaiseInvalidLockPrefix().
2928 *
2929 * This is for things that will _always_ decode to an \#UD, taking the
2930 * recompiler into consideration and everything.
2931 *
2932 * @return Strict VBox status code.
2933 */
2934#define IEMOP_RAISE_INVALID_LOCK_PREFIX_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidLockPrefix)
2935
2936/**
2937 * Macro for calling iemCImplRaiseInvalidOpcode() for decode/static \#UDs.
2938 *
2939 * This is for things that will _always_ decode to an \#UD, taking the
2940 * recompiler into consideration and everything.
2941 *
2942 * @return Strict VBox status code.
2943 */
2944#define IEMOP_RAISE_INVALID_OPCODE_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidOpcode)
2945
2946/**
2947 * Macro for calling iemCImplRaiseInvalidOpcode() for runtime-style \#UDs.
2948 *
2949 * Using this macro means you've got _buggy_ _code_ and are doing things that
2950 * belong exclusively in IEMAllCImpl.cpp during decoding.
2951 *
2952 * @return Strict VBox status code.
2953 * @see IEMOP_RAISE_INVALID_OPCODE_RET
2954 */
2955#define IEMOP_RAISE_INVALID_OPCODE_RUNTIME_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidOpcode)
2956
2957/** @} */
2958
2959/** @name Register Access.
2960 * @{ */
2961VBOXSTRICTRC iemRegRipRelativeJumpS8AndFinishClearingRF(PVMCPUCC pVCpu, uint8_t cbInstr, int8_t offNextInstr,
2962 IEMMODE enmEffOpSize) RT_NOEXCEPT;
2963VBOXSTRICTRC iemRegRipRelativeJumpS16AndFinishClearingRF(PVMCPUCC pVCpu, uint8_t cbInstr, int16_t offNextInstr) RT_NOEXCEPT;
2964VBOXSTRICTRC iemRegRipRelativeJumpS32AndFinishClearingRF(PVMCPUCC pVCpu, uint8_t cbInstr, int32_t offNextInstr,
2965 IEMMODE enmEffOpSize) RT_NOEXCEPT;
2966/** @} */
2967
2968/** @name FPU access and helpers.
2969 * @{ */
2970void iemFpuPushResult(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint16_t uFpuOpcode) RT_NOEXCEPT;
2971void iemFpuPushResultWithMemOp(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2972void iemFpuPushResultTwo(PVMCPUCC pVCpu, PIEMFPURESULTTWO pResult, uint16_t uFpuOpcode) RT_NOEXCEPT;
2973void iemFpuStoreResult(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
2974void iemFpuStoreResultThenPop(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
2975void iemFpuStoreResultWithMemOp(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg,
2976 uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2977void iemFpuStoreResultWithMemOpThenPop(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg,
2978 uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2979void iemFpuUpdateOpcodeAndIp(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
2980void iemFpuUpdateFSW(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
2981void iemFpuUpdateFSWThenPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
2982void iemFpuUpdateFSWWithMemOp(PVMCPUCC pVCpu, uint16_t u16FSW, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2983void iemFpuUpdateFSWThenPopPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
2984void iemFpuUpdateFSWWithMemOpThenPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2985void iemFpuStackUnderflow(PVMCPUCC pVCpu, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
2986void iemFpuStackUnderflowWithMemOp(PVMCPUCC pVCpu, uint8_t iStReg, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2987void iemFpuStackUnderflowThenPop(PVMCPUCC pVCpu, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
2988void iemFpuStackUnderflowWithMemOpThenPop(PVMCPUCC pVCpu, uint8_t iStReg, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2989void iemFpuStackUnderflowThenPopPop(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
2990void iemFpuStackPushUnderflow(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
2991void iemFpuStackPushUnderflowTwo(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
2992void iemFpuStackPushOverflow(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
2993void iemFpuStackPushOverflowWithMemOp(PVMCPUCC pVCpu, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2994/** @} */
2995
2996/** @name SSE+AVX SIMD access and helpers.
2997 * @{ */
2998void iemSseUpdateMxcsr(PVMCPUCC pVCpu, uint32_t fMxcsr) RT_NOEXCEPT;
2999/** @} */
3000
3001/** @name Memory access.
3002 * @{ */
3003
3004/** Report a \#GP instead of \#AC and do not restrict to ring-3. */
3005#define IEM_MEMMAP_F_ALIGN_GP RT_BIT_32(16)
3006/** SSE access that should report a \#GP instead of \#AC, unless MXCSR.MM=1
3007 * when it works like normal \#AC. Always used with IEM_MEMMAP_F_ALIGN_GP. */
3008#define IEM_MEMMAP_F_ALIGN_SSE RT_BIT_32(17)
3009/** If \#AC is applicable, raise it. Always used with IEM_MEMMAP_F_ALIGN_GP.
3010 * Users include FXSAVE & FXRSTOR. */
3011#define IEM_MEMMAP_F_ALIGN_GP_OR_AC RT_BIT_32(18)
3012
3013VBOXSTRICTRC iemMemMap(PVMCPUCC pVCpu, void **ppvMem, uint8_t *pbUnmapInfo, size_t cbMem, uint8_t iSegReg, RTGCPTR GCPtrMem,
3014 uint32_t fAccess, uint32_t uAlignCtl) RT_NOEXCEPT;
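
/* Illustrative sketch (not part of the source): how a caller might combine the alignment
 * control flags above with iemMemMap().  IEM_ACCESS_DATA_RW and iemMemCommitAndUnmap() are
 * assumed to come from the generic IEM internal headers. */
#if 0
static VBOXSTRICTRC iemExampleMapAlignedSse(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem)
{
    void        *pvMem      = NULL;
    uint8_t      bUnmapInfo = 0;
    /* 16 byte SSE access: the low bits of uAlignCtl carry the alignment mask, the high bits
       select the #GP vs #AC policy. */
    VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMem, &bUnmapInfo, 16 /*cbMem*/, iSegReg, GCPtrMem,
                                      IEM_ACCESS_DATA_RW,
                                      15 /*align mask*/ | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE);
    if (rcStrict == VINF_SUCCESS)
    {
        /* ... read/modify the 16 bytes at pvMem ... */
        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo); /* assumed declared elsewhere */
    }
    return rcStrict;
}
#endif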
3015#ifndef IN_RING3
3016VBOXSTRICTRC iemMemCommitAndUnmapPostponeTroubleToR3(PVMCPUCC pVCpu, uint8_t bUnmapInfo) RT_NOEXCEPT;
3017#endif
3018VBOXSTRICTRC iemMemApplySegment(PVMCPUCC pVCpu, uint32_t fAccess, uint8_t iSegReg, size_t cbMem, PRTGCPTR pGCPtrMem) RT_NOEXCEPT;
3019VBOXSTRICTRC iemMemMarkSelDescAccessed(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
3020VBOXSTRICTRC iemMemPageTranslateAndCheckAccess(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t cbAccess, uint32_t fAccess, PRTGCPHYS pGCPhysMem) RT_NOEXCEPT;
3021
3022void iemOpcodeFlushLight(PVMCPUCC pVCpu, uint8_t cbInstr);
3023void iemOpcodeFlushHeavy(PVMCPUCC pVCpu, uint8_t cbInstr);
3024#ifdef IEM_WITH_CODE_TLB
3025void iemOpcodeFetchBytesJmp(PVMCPUCC pVCpu, size_t cbDst, void *pvDst) IEM_NOEXCEPT_MAY_LONGJMP;
3026#else
3027VBOXSTRICTRC iemOpcodeFetchMoreBytes(PVMCPUCC pVCpu, size_t cbMin) RT_NOEXCEPT;
3028#endif
3029#ifdef IEM_WITH_SETJMP
3030uint8_t iemOpcodeGetNextU8SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
3031uint16_t iemOpcodeGetNextU16SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
3032uint32_t iemOpcodeGetNextU32SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
3033uint64_t iemOpcodeGetNextU64SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
3034#else
3035VBOXSTRICTRC iemOpcodeGetNextU8Slow(PVMCPUCC pVCpu, uint8_t *pb) RT_NOEXCEPT;
3036VBOXSTRICTRC iemOpcodeGetNextS8SxU16Slow(PVMCPUCC pVCpu, uint16_t *pu16) RT_NOEXCEPT;
3037VBOXSTRICTRC iemOpcodeGetNextS8SxU32Slow(PVMCPUCC pVCpu, uint32_t *pu32) RT_NOEXCEPT;
3038VBOXSTRICTRC iemOpcodeGetNextS8SxU64Slow(PVMCPUCC pVCpu, uint64_t *pu64) RT_NOEXCEPT;
3039VBOXSTRICTRC iemOpcodeGetNextU16Slow(PVMCPUCC pVCpu, uint16_t *pu16) RT_NOEXCEPT;
3040VBOXSTRICTRC iemOpcodeGetNextU16ZxU32Slow(PVMCPUCC pVCpu, uint32_t *pu32) RT_NOEXCEPT;
3041VBOXSTRICTRC iemOpcodeGetNextU16ZxU64Slow(PVMCPUCC pVCpu, uint64_t *pu64) RT_NOEXCEPT;
3042VBOXSTRICTRC iemOpcodeGetNextU32Slow(PVMCPUCC pVCpu, uint32_t *pu32) RT_NOEXCEPT;
3043VBOXSTRICTRC iemOpcodeGetNextU32ZxU64Slow(PVMCPUCC pVCpu, uint64_t *pu64) RT_NOEXCEPT;
3044VBOXSTRICTRC iemOpcodeGetNextS32SxU64Slow(PVMCPUCC pVCpu, uint64_t *pu64) RT_NOEXCEPT;
3045VBOXSTRICTRC iemOpcodeGetNextU64Slow(PVMCPUCC pVCpu, uint64_t *pu64) RT_NOEXCEPT;
3046#endif
3047
3048VBOXSTRICTRC iemMemFetchDataU8(PVMCPUCC pVCpu, uint8_t *pu8Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3049VBOXSTRICTRC iemMemFetchDataU16(PVMCPUCC pVCpu, uint16_t *pu16Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3050VBOXSTRICTRC iemMemFetchDataU32(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3051VBOXSTRICTRC iemMemFetchDataU32NoAc(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3052VBOXSTRICTRC iemMemFetchDataU32_ZX_U64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3053VBOXSTRICTRC iemMemFetchDataU64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3054VBOXSTRICTRC iemMemFetchDataU64NoAc(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3055VBOXSTRICTRC iemMemFetchDataU64AlignedU128(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3056VBOXSTRICTRC iemMemFetchDataR80(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3057VBOXSTRICTRC iemMemFetchDataD80(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3058VBOXSTRICTRC iemMemFetchDataU128(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3059VBOXSTRICTRC iemMemFetchDataU128NoAc(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3060VBOXSTRICTRC iemMemFetchDataU128AlignedSse(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3061VBOXSTRICTRC iemMemFetchDataU256(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3062VBOXSTRICTRC iemMemFetchDataU256NoAc(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3063VBOXSTRICTRC iemMemFetchDataU256AlignedAvx(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3064VBOXSTRICTRC iemMemFetchDataXdtr(PVMCPUCC pVCpu, uint16_t *pcbLimit, PRTGCPTR pGCPtrBase, uint8_t iSegReg,
3065 RTGCPTR GCPtrMem, IEMMODE enmOpSize) RT_NOEXCEPT;
3066#ifdef IEM_WITH_SETJMP
3067uint8_t iemMemFetchDataU8SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3068uint16_t iemMemFetchDataU16SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3069uint32_t iemMemFetchDataU32SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3070uint32_t iemMemFetchDataU32NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3071uint32_t iemMemFlatFetchDataU32SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3072uint64_t iemMemFetchDataU64SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3073uint64_t iemMemFetchDataU64NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3074uint64_t iemMemFetchDataU64AlignedU128SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3075void iemMemFetchDataR80SafeJmp(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3076void iemMemFetchDataD80SafeJmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3077void iemMemFetchDataU128SafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3078void iemMemFetchDataU128NoAcSafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3079void iemMemFetchDataU128AlignedSseSafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3080void iemMemFetchDataU256SafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3081void iemMemFetchDataU256NoAcSafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3082void iemMemFetchDataU256AlignedAvxSafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3083# if 0 /* these are inlined now */
3084uint8_t iemMemFetchDataU8Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3085uint16_t iemMemFetchDataU16Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3086uint32_t iemMemFetchDataU32Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3087uint32_t iemMemFlatFetchDataU32Jmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3088uint64_t iemMemFetchDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3089uint64_t iemMemFetchDataU64AlignedU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3090void iemMemFetchDataR80Jmp(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3091void iemMemFetchDataD80Jmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3092void iemMemFetchDataU128Jmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3093void iemMemFetchDataU128NoAcJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3094void iemMemFetchDataU128AlignedSseJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3095void iemMemFetchDataU256NoAcJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3096void iemMemFetchDataU256AlignedAvxJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3097# endif
3098void iemMemFetchDataU256Jmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3099#endif
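
/* Illustrative sketch (not part of the source) contrasting the two data fetch flavours
 * declared above: the status code variant returns a VBOXSTRICTRC the caller must propagate,
 * while the SafeJmp variant (IEM_WITH_SETJMP builds) longjmps on failure and returns the
 * value directly. */
#if 0
static VBOXSTRICTRC iemExampleFetchDword(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t *pu32Dst)
{
# ifdef IEM_WITH_SETJMP
    *pu32Dst = iemMemFetchDataU32SafeJmp(pVCpu, iSegReg, GCPtrMem); /* raises/longjmps on failure */
    return VINF_SUCCESS;
# else
    return iemMemFetchDataU32(pVCpu, pu32Dst, iSegReg, GCPtrMem);   /* status must be checked by the caller */
# endif
}
#endif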
3100
3101VBOXSTRICTRC iemMemFetchSysU8(PVMCPUCC pVCpu, uint8_t *pu8Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3102VBOXSTRICTRC iemMemFetchSysU16(PVMCPUCC pVCpu, uint16_t *pu16Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3103VBOXSTRICTRC iemMemFetchSysU32(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3104VBOXSTRICTRC iemMemFetchSysU64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3105VBOXSTRICTRC iemMemFetchSelDesc(PVMCPUCC pVCpu, PIEMSELDESC pDesc, uint16_t uSel, uint8_t uXcpt) RT_NOEXCEPT;
3106VBOXSTRICTRC iemMemFetchSelDescWithErr(PVMCPUCC pVCpu, PIEMSELDESC pDesc, uint16_t uSel,
3107 uint8_t uXcpt, uint16_t uErrorCode) RT_NOEXCEPT;
3108
3109VBOXSTRICTRC iemMemStoreDataU8(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) RT_NOEXCEPT;
3110VBOXSTRICTRC iemMemStoreDataU16(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint16_t u16Value) RT_NOEXCEPT;
3111VBOXSTRICTRC iemMemStoreDataU32(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t u32Value) RT_NOEXCEPT;
3112VBOXSTRICTRC iemMemStoreDataU64(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint64_t u64Value) RT_NOEXCEPT;
3113VBOXSTRICTRC iemMemStoreDataU128(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) RT_NOEXCEPT;
3114VBOXSTRICTRC iemMemStoreDataU128NoAc(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) RT_NOEXCEPT;
3115VBOXSTRICTRC iemMemStoreDataU128AlignedSse(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) RT_NOEXCEPT;
3116VBOXSTRICTRC iemMemStoreDataU256(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) RT_NOEXCEPT;
3117VBOXSTRICTRC iemMemStoreDataU256NoAc(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) RT_NOEXCEPT;
3118VBOXSTRICTRC iemMemStoreDataU256AlignedAvx(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) RT_NOEXCEPT;
3119VBOXSTRICTRC iemMemStoreDataXdtr(PVMCPUCC pVCpu, uint16_t cbLimit, RTGCPTR GCPtrBase, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3120#ifdef IEM_WITH_SETJMP
3121void iemMemStoreDataU8SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) IEM_NOEXCEPT_MAY_LONGJMP;
3122void iemMemStoreDataU16SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint16_t u16Value) IEM_NOEXCEPT_MAY_LONGJMP;
3123void iemMemStoreDataU32SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t u32Value) IEM_NOEXCEPT_MAY_LONGJMP;
3124void iemMemStoreDataU64SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint64_t u64Value) IEM_NOEXCEPT_MAY_LONGJMP;
3125void iemMemStoreDataU128SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3126void iemMemStoreDataU128NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT128U pu128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3127void iemMemStoreDataU128AlignedSseSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT128U pu128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3128void iemMemStoreDataU256SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3129void iemMemStoreDataU256NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3130void iemMemStoreDataU256AlignedAvxSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3131void iemMemStoreDataR80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTFLOAT80U pr80Value) IEM_NOEXCEPT_MAY_LONGJMP;
3132void iemMemStoreDataD80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTPBCD80U pd80Value) IEM_NOEXCEPT_MAY_LONGJMP;
3133#if 0
3134void iemMemStoreDataU8Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) IEM_NOEXCEPT_MAY_LONGJMP;
3135void iemMemStoreDataU16Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint16_t u16Value) IEM_NOEXCEPT_MAY_LONGJMP;
3136void iemMemStoreDataU32Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t u32Value) IEM_NOEXCEPT_MAY_LONGJMP;
3137void iemMemStoreDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint64_t u64Value) IEM_NOEXCEPT_MAY_LONGJMP;
3138void iemMemStoreDataU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3139void iemMemStoreDataNoAcU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3140void iemMemStoreDataU256NoAcJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3141void iemMemStoreDataU256AlignedAvxJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3142#endif
3143void iemMemStoreDataU128AlignedSseJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3144void iemMemStoreDataU256Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3145#endif
3146
3147#ifdef IEM_WITH_SETJMP
3148uint8_t *iemMemMapDataU8RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3149uint8_t *iemMemMapDataU8AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3150uint8_t *iemMemMapDataU8WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3151uint8_t const *iemMemMapDataU8RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3152uint16_t *iemMemMapDataU16RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3153uint16_t *iemMemMapDataU16AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3154uint16_t *iemMemMapDataU16WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3155uint16_t const *iemMemMapDataU16RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3156uint32_t *iemMemMapDataU32RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3157uint32_t *iemMemMapDataU32AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3158uint32_t *iemMemMapDataU32WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3159uint32_t const *iemMemMapDataU32RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3160uint64_t *iemMemMapDataU64RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3161uint64_t *iemMemMapDataU64AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3162uint64_t *iemMemMapDataU64WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3163uint64_t const *iemMemMapDataU64RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3164PRTFLOAT80U iemMemMapDataR80RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3165PRTFLOAT80U iemMemMapDataR80WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3166PCRTFLOAT80U iemMemMapDataR80RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3167PRTPBCD80U iemMemMapDataD80RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3168PRTPBCD80U iemMemMapDataD80WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3169PCRTPBCD80U iemMemMapDataD80RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3170PRTUINT128U iemMemMapDataU128RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3171PRTUINT128U iemMemMapDataU128AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3172PRTUINT128U iemMemMapDataU128WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3173PCRTUINT128U iemMemMapDataU128RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3174#endif
3175
3176VBOXSTRICTRC iemMemStackPushBeginSpecial(PVMCPUCC pVCpu, size_t cbMem, uint32_t cbAlign,
3177 void **ppvMem, uint8_t *pbUnmapInfo, uint64_t *puNewRsp) RT_NOEXCEPT;
3178VBOXSTRICTRC iemMemStackPushCommitSpecial(PVMCPUCC pVCpu, uint8_t bUnmapInfo, uint64_t uNewRsp) RT_NOEXCEPT;
3179VBOXSTRICTRC iemMemStackPushU16(PVMCPUCC pVCpu, uint16_t u16Value) RT_NOEXCEPT;
3180VBOXSTRICTRC iemMemStackPushU32(PVMCPUCC pVCpu, uint32_t u32Value) RT_NOEXCEPT;
3181VBOXSTRICTRC iemMemStackPushU64(PVMCPUCC pVCpu, uint64_t u64Value) RT_NOEXCEPT;
3182VBOXSTRICTRC iemMemStackPushU16Ex(PVMCPUCC pVCpu, uint16_t u16Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
3183VBOXSTRICTRC iemMemStackPushU32Ex(PVMCPUCC pVCpu, uint32_t u32Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
3184VBOXSTRICTRC iemMemStackPushU64Ex(PVMCPUCC pVCpu, uint64_t u64Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
3185VBOXSTRICTRC iemMemStackPushU32SReg(PVMCPUCC pVCpu, uint32_t u32Value) RT_NOEXCEPT;
3186VBOXSTRICTRC iemMemStackPopBeginSpecial(PVMCPUCC pVCpu, size_t cbMem, uint32_t cbAlign,
3187 void const **ppvMem, uint8_t *pbUnmapInfo, uint64_t *puNewRsp) RT_NOEXCEPT;
3188VBOXSTRICTRC iemMemStackPopContinueSpecial(PVMCPUCC pVCpu, size_t off, size_t cbMem,
3189 void const **ppvMem, uint8_t *pbUnmapInfo, uint64_t uCurNewRsp) RT_NOEXCEPT;
3190VBOXSTRICTRC iemMemStackPopDoneSpecial(PVMCPUCC pVCpu, uint8_t bUnmapInfo) RT_NOEXCEPT;
3191VBOXSTRICTRC iemMemStackPopU16(PVMCPUCC pVCpu, uint16_t *pu16Value) RT_NOEXCEPT;
3192VBOXSTRICTRC iemMemStackPopU32(PVMCPUCC pVCpu, uint32_t *pu32Value) RT_NOEXCEPT;
3193VBOXSTRICTRC iemMemStackPopU64(PVMCPUCC pVCpu, uint64_t *pu64Value) RT_NOEXCEPT;
3194VBOXSTRICTRC iemMemStackPopU16Ex(PVMCPUCC pVCpu, uint16_t *pu16Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
3195VBOXSTRICTRC iemMemStackPopU32Ex(PVMCPUCC pVCpu, uint32_t *pu32Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
3196VBOXSTRICTRC iemMemStackPopU64Ex(PVMCPUCC pVCpu, uint64_t *pu64Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
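
/* Illustrative sketch (not part of the source) of the begin/commit pattern used when a push
 * has to be assembled in place (e.g. far call / iret frames); a simple single value push
 * would just use iemMemStackPushU16/U32/U64 above.  The cbAlign value is assumed to be a
 * mask like iemMemMap's uAlignCtl. */
#if 0
static VBOXSTRICTRC iemExamplePushDword(PVMCPUCC pVCpu, uint32_t uValue)
{
    void        *pvMem      = NULL;
    uint8_t      bUnmapInfo = 0;
    uint64_t     uNewRsp    = 0;
    VBOXSTRICTRC rcStrict = iemMemStackPushBeginSpecial(pVCpu, sizeof(uint32_t), 3 /*cbAlign, assumed mask*/,
                                                        &pvMem, &bUnmapInfo, &uNewRsp);
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    *(uint32_t *)pvMem = uValue;                                     /* fill the reserved stack bytes */
    return iemMemStackPushCommitSpecial(pVCpu, bUnmapInfo, uNewRsp); /* commit memory and RSP together */
}
#endif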
3197
3198#ifdef IEM_WITH_SETJMP
3199void iemMemStackPushU16SafeJmp(PVMCPUCC pVCpu, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3200void iemMemStackPushU32SafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3201void iemMemStackPushU32SRegSafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3202void iemMemStackPushU64SafeJmp(PVMCPUCC pVCpu, uint64_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3203void iemMemStackPopGRegU16SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3204void iemMemStackPopGRegU32SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3205void iemMemStackPopGRegU64SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3206
3207void iemMemFlat32StackPushU16SafeJmp(PVMCPUCC pVCpu, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3208void iemMemFlat32StackPushU32SafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3209void iemMemFlat32StackPushU32SRegSafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3210void iemMemFlat32StackPopGRegU16SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3211void iemMemFlat32StackPopGRegU32SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3212
3213void iemMemFlat64StackPushU16SafeJmp(PVMCPUCC pVCpu, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3214void iemMemFlat64StackPushU64SafeJmp(PVMCPUCC pVCpu, uint64_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3215void iemMemFlat64StackPopGRegU16SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3216void iemMemFlat64StackPopGRegU64SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3217
3218void iemMemStoreStackU16SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3219void iemMemStoreStackU32SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3220void iemMemStoreStackU32SRegSafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3221void iemMemStoreStackU64SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3222
3223uint16_t iemMemFetchStackU16SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3224uint32_t iemMemFetchStackU32SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3225uint64_t iemMemFetchStackU64SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3226
3227#endif
3228
3229/** @} */
3230
3231/** @name IEMAllCImpl.cpp
3232 * @note sed -e '/IEM_CIMPL_DEF_/!d' -e 's/IEM_CIMPL_DEF_/IEM_CIMPL_PROTO_/' -e 's/$/;/'
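 *       For example, that expression turns the definition line
 *           IEM_CIMPL_DEF_2(iemCImpl_pop_mem16, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst)
 *       in IEMAllCImpl.cpp into the prototype
 *           IEM_CIMPL_PROTO_2(iemCImpl_pop_mem16, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
 *       listed below.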
3233 * @{ */
3234
3235/**
3236 * INT instruction types - iemCImpl_int().
3237 * @note x86 specific
3238 */
3239typedef enum IEMINT
3240{
3241 /** INT n instruction (opcode 0xcd imm). */
3242 IEMINT_INTN = 0,
3243 /** Single byte INT3 instruction (opcode 0xcc). */
3244 IEMINT_INT3 = IEM_XCPT_FLAGS_BP_INSTR,
3245 /** Single byte INTO instruction (opcode 0xce). */
3246 IEMINT_INTO = IEM_XCPT_FLAGS_OF_INSTR,
3247 /** Single byte INT1 (ICEBP) instruction (opcode 0xf1). */
3248 IEMINT_INT1 = IEM_XCPT_FLAGS_ICEBP_INSTR
3249} IEMINT;
3250AssertCompileSize(IEMINT, 4);
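
/* Illustrative sketch (not part of the source) of how the four INT flavours reach
 * iemCImpl_int(), assuming the IEM_CIMPL_* macros prepend the usual pVCpu/cbInstr
 * parameters; only INT n carries a decoded immediate, the others use fixed vectors. */
#if 0
static VBOXSTRICTRC iemExampleDispatchInt(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t bOpcode, uint8_t bImm)
{
    switch (bOpcode)
    {
        case 0xcd: return iemCImpl_int(pVCpu, cbInstr, bImm,        IEMINT_INTN); /* INT imm8 */
        case 0xcc: return iemCImpl_int(pVCpu, cbInstr, X86_XCPT_BP, IEMINT_INT3); /* INT3 */
        case 0xce: return iemCImpl_int(pVCpu, cbInstr, X86_XCPT_OF, IEMINT_INTO); /* INTO */
        case 0xf1: return iemCImpl_int(pVCpu, cbInstr, X86_XCPT_DB, IEMINT_INT1); /* INT1/ICEBP */
        default:   return VERR_NOT_IMPLEMENTED;
    }
}
#endif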
3251
3252IEM_CIMPL_PROTO_2(iemCImpl_pop_mem16, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3253IEM_CIMPL_PROTO_2(iemCImpl_pop_mem32, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3254IEM_CIMPL_PROTO_2(iemCImpl_pop_mem64, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3255IEM_CIMPL_PROTO_0(iemCImpl_popa_16);
3256IEM_CIMPL_PROTO_0(iemCImpl_popa_32);
3257IEM_CIMPL_PROTO_0(iemCImpl_pusha_16);
3258IEM_CIMPL_PROTO_0(iemCImpl_pusha_32);
3259IEM_CIMPL_PROTO_1(iemCImpl_pushf, IEMMODE, enmEffOpSize);
3260IEM_CIMPL_PROTO_1(iemCImpl_popf, IEMMODE, enmEffOpSize);
3261IEM_CIMPL_PROTO_3(iemCImpl_FarJmp, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
3262IEM_CIMPL_PROTO_3(iemCImpl_callf, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
3263typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
3264typedef FNIEMCIMPLFARBRANCH *PFNIEMCIMPLFARBRANCH;
3265IEM_CIMPL_PROTO_2(iemCImpl_retf, IEMMODE, enmEffOpSize, uint16_t, cbPop);
3266IEM_CIMPL_PROTO_3(iemCImpl_enter, IEMMODE, enmEffOpSize, uint16_t, cbFrame, uint8_t, cParameters);
3267IEM_CIMPL_PROTO_1(iemCImpl_leave, IEMMODE, enmEffOpSize);
3268IEM_CIMPL_PROTO_2(iemCImpl_int, uint8_t, u8Int, IEMINT, enmInt);
3269IEM_CIMPL_PROTO_1(iemCImpl_iret_real_v8086, IEMMODE, enmEffOpSize);
3270IEM_CIMPL_PROTO_4(iemCImpl_iret_prot_v8086, uint32_t, uNewEip, uint16_t, uNewCs, uint32_t, uNewFlags, uint64_t, uNewRsp);
3271IEM_CIMPL_PROTO_1(iemCImpl_iret_prot_NestedTask, IEMMODE, enmEffOpSize);
3272IEM_CIMPL_PROTO_1(iemCImpl_iret_prot, IEMMODE, enmEffOpSize);
3273IEM_CIMPL_PROTO_1(iemCImpl_iret_64bit, IEMMODE, enmEffOpSize);
3274IEM_CIMPL_PROTO_1(iemCImpl_iret, IEMMODE, enmEffOpSize);
3275IEM_CIMPL_PROTO_0(iemCImpl_loadall286);
3276IEM_CIMPL_PROTO_0(iemCImpl_syscall);
3277IEM_CIMPL_PROTO_1(iemCImpl_sysret, IEMMODE, enmEffOpSize);
3278IEM_CIMPL_PROTO_0(iemCImpl_sysenter);
3279IEM_CIMPL_PROTO_1(iemCImpl_sysexit, IEMMODE, enmEffOpSize);
3280IEM_CIMPL_PROTO_2(iemCImpl_LoadSReg, uint8_t, iSegReg, uint16_t, uSel);
3281IEM_CIMPL_PROTO_2(iemCImpl_load_SReg, uint8_t, iSegReg, uint16_t, uSel);
3282IEM_CIMPL_PROTO_2(iemCImpl_pop_Sreg, uint8_t, iSegReg, IEMMODE, enmEffOpSize);
3283IEM_CIMPL_PROTO_5(iemCImpl_load_SReg_Greg, uint16_t, uSel, uint64_t, offSeg, uint8_t, iSegReg, uint8_t, iGReg, IEMMODE, enmEffOpSize);
3284IEM_CIMPL_PROTO_2(iemCImpl_VerX, uint16_t, uSel, bool, fWrite);
3285IEM_CIMPL_PROTO_3(iemCImpl_LarLsl_u64, uint64_t *, pu64Dst, uint16_t, uSel, bool, fIsLar);
3286IEM_CIMPL_PROTO_3(iemCImpl_LarLsl_u16, uint16_t *, pu16Dst, uint16_t, uSel, bool, fIsLar);
3287IEM_CIMPL_PROTO_3(iemCImpl_lgdt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc, IEMMODE, enmEffOpSize);
3288IEM_CIMPL_PROTO_2(iemCImpl_sgdt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3289IEM_CIMPL_PROTO_3(iemCImpl_lidt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc, IEMMODE, enmEffOpSize);
3290IEM_CIMPL_PROTO_2(iemCImpl_sidt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3291IEM_CIMPL_PROTO_1(iemCImpl_lldt, uint16_t, uNewLdt);
3292IEM_CIMPL_PROTO_2(iemCImpl_sldt_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
3293IEM_CIMPL_PROTO_2(iemCImpl_sldt_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3294IEM_CIMPL_PROTO_1(iemCImpl_ltr, uint16_t, uNewTr);
3295IEM_CIMPL_PROTO_2(iemCImpl_str_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
3296IEM_CIMPL_PROTO_2(iemCImpl_str_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3297IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Cd, uint8_t, iGReg, uint8_t, iCrReg);
3298IEM_CIMPL_PROTO_2(iemCImpl_smsw_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
3299IEM_CIMPL_PROTO_2(iemCImpl_smsw_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3300IEM_CIMPL_PROTO_4(iemCImpl_load_CrX, uint8_t, iCrReg, uint64_t, uNewCrX, IEMACCESSCRX, enmAccessCrX, uint8_t, iGReg);
3301IEM_CIMPL_PROTO_2(iemCImpl_mov_Cd_Rd, uint8_t, iCrReg, uint8_t, iGReg);
3302IEM_CIMPL_PROTO_2(iemCImpl_lmsw, uint16_t, u16NewMsw, RTGCPTR, GCPtrEffDst);
3303IEM_CIMPL_PROTO_0(iemCImpl_clts);
3304IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Dd, uint8_t, iGReg, uint8_t, iDrReg);
3305IEM_CIMPL_PROTO_2(iemCImpl_mov_Dd_Rd, uint8_t, iDrReg, uint8_t, iGReg);
3306IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Td, uint8_t, iGReg, uint8_t, iTrReg);
3307IEM_CIMPL_PROTO_2(iemCImpl_mov_Td_Rd, uint8_t, iTrReg, uint8_t, iGReg);
3308IEM_CIMPL_PROTO_1(iemCImpl_invlpg, RTGCPTR, GCPtrPage);
3309IEM_CIMPL_PROTO_3(iemCImpl_invpcid, uint8_t, iEffSeg, RTGCPTR, GCPtrInvpcidDesc, uint64_t, uInvpcidType);
3310IEM_CIMPL_PROTO_0(iemCImpl_invd);
3311IEM_CIMPL_PROTO_0(iemCImpl_wbinvd);
3312IEM_CIMPL_PROTO_0(iemCImpl_rsm);
3313IEM_CIMPL_PROTO_0(iemCImpl_rdtsc);
3314IEM_CIMPL_PROTO_0(iemCImpl_rdtscp);
3315IEM_CIMPL_PROTO_0(iemCImpl_rdpmc);
3316IEM_CIMPL_PROTO_0(iemCImpl_rdmsr);
3317IEM_CIMPL_PROTO_0(iemCImpl_wrmsr);
3318IEM_CIMPL_PROTO_3(iemCImpl_in, uint16_t, u16Port, uint8_t, cbReg, uint8_t, bImmAndEffAddrMode);
3319IEM_CIMPL_PROTO_2(iemCImpl_in_eAX_DX, uint8_t, cbReg, IEMMODE, enmEffAddrMode);
3320IEM_CIMPL_PROTO_3(iemCImpl_out, uint16_t, u16Port, uint8_t, cbReg, uint8_t, bImmAndEffAddrMode);
3321IEM_CIMPL_PROTO_2(iemCImpl_out_DX_eAX, uint8_t, cbReg, IEMMODE, enmEffAddrMode);
3322IEM_CIMPL_PROTO_0(iemCImpl_cli);
3323IEM_CIMPL_PROTO_0(iemCImpl_sti);
3324IEM_CIMPL_PROTO_0(iemCImpl_hlt);
3325IEM_CIMPL_PROTO_1(iemCImpl_monitor, uint8_t, iEffSeg);
3326IEM_CIMPL_PROTO_0(iemCImpl_mwait);
3327IEM_CIMPL_PROTO_0(iemCImpl_swapgs);
3328IEM_CIMPL_PROTO_0(iemCImpl_cpuid);
3329IEM_CIMPL_PROTO_1(iemCImpl_aad, uint8_t, bImm);
3330IEM_CIMPL_PROTO_1(iemCImpl_aam, uint8_t, bImm);
3331IEM_CIMPL_PROTO_0(iemCImpl_daa);
3332IEM_CIMPL_PROTO_0(iemCImpl_das);
3333IEM_CIMPL_PROTO_0(iemCImpl_aaa);
3334IEM_CIMPL_PROTO_0(iemCImpl_aas);
3335IEM_CIMPL_PROTO_3(iemCImpl_bound_16, int16_t, idxArray, int16_t, idxLowerBound, int16_t, idxUpperBound);
3336IEM_CIMPL_PROTO_3(iemCImpl_bound_32, int32_t, idxArray, int32_t, idxLowerBound, int32_t, idxUpperBound);
3337IEM_CIMPL_PROTO_0(iemCImpl_xgetbv);
3338IEM_CIMPL_PROTO_0(iemCImpl_xsetbv);
3339IEM_CIMPL_PROTO_5(iemCImpl_cmpxchg16b_fallback_rendezvous, PRTUINT128U, pu128Dst, PRTUINT128U, pu128RaxRdx,
3340 PRTUINT128U, pu128RbxRcx, uint32_t *, pEFlags, uint8_t, bUnmapInfo);
3341IEM_CIMPL_PROTO_2(iemCImpl_clflush_clflushopt, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
3342IEM_CIMPL_PROTO_1(iemCImpl_finit, bool, fCheckXcpts);
3343IEM_CIMPL_PROTO_3(iemCImpl_fxsave, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
3344IEM_CIMPL_PROTO_3(iemCImpl_fxrstor, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
3345IEM_CIMPL_PROTO_3(iemCImpl_xsave, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
3346IEM_CIMPL_PROTO_3(iemCImpl_xrstor, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
3347IEM_CIMPL_PROTO_2(iemCImpl_stmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
3348IEM_CIMPL_PROTO_2(iemCImpl_vstmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
3349IEM_CIMPL_PROTO_2(iemCImpl_ldmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
3350IEM_CIMPL_PROTO_2(iemCImpl_vldmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
3351IEM_CIMPL_PROTO_3(iemCImpl_fnstenv, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3352IEM_CIMPL_PROTO_3(iemCImpl_fnsave, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3353IEM_CIMPL_PROTO_3(iemCImpl_fldenv, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3354IEM_CIMPL_PROTO_3(iemCImpl_frstor, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3355IEM_CIMPL_PROTO_1(iemCImpl_fldcw, uint16_t, u16Fcw);
3356IEM_CIMPL_PROTO_2(iemCImpl_fxch_underflow, uint8_t, iStReg, uint16_t, uFpuOpcode);
3357IEM_CIMPL_PROTO_3(iemCImpl_fcomi_fucomi, uint8_t, iStReg, bool, fUCmp, uint32_t, uPopAndFpuOpcode);
3358IEM_CIMPL_PROTO_2(iemCImpl_rdseed, uint8_t, iReg, IEMMODE, enmEffOpSize);
3359IEM_CIMPL_PROTO_2(iemCImpl_rdrand, uint8_t, iReg, IEMMODE, enmEffOpSize);
3360IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3361IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3362IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
3363IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
3364IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3365IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3366IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
3367IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
3368IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3369IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3370IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
3371IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
3372IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3373IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3374IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
3375IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
3376IEM_CIMPL_PROTO_2(iemCImpl_vpgather_worker_xx, uint32_t, u32PackedArgs, uint32_t, u32Disp);
3377
3378/** Packed 32-bit argument for iemCImpl_vpgather_worker_xx. */
3379typedef union IEMGATHERARGS
3380{
3381 /** Integer view. */
3382 uint32_t u;
3383 /** Bitfield view. */
3384 struct
3385 {
3386 uint32_t iYRegDst : 4; /**< 0 - XMM or YMM register number (destination) */
3387 uint32_t iYRegIdc : 4; /**< 4 - XMM or YMM register number (indices) */
3388 uint32_t iYRegMsk : 4; /**< 8 - XMM or YMM register number (mask) */
3389 uint32_t iGRegBase : 4; /**< 12 - general register number (base ptr) */
3390 uint32_t iScale : 2; /**< 16 - scale factor (1/2/4/8) */
3391 uint32_t enmEffOpSize : 2; /**< 18 - operand size (16/32/64/--) */
3392 uint32_t enmEffAddrMode : 2; /**< 20 - addressing mode (16/32/64/--) */
3393 uint32_t iEffSeg : 3; /**< 22 - effective segment (ES/CS/SS/DS/FS/GS) */
3394 uint32_t fVex256 : 1; /**< 25 - overall instruction width (128/256 bits) */
3395 uint32_t fIdxQword : 1; /**< 26 - individual index width (4/8 bytes) */
3396 uint32_t fValQword : 1; /**< 27 - individual value width (4/8 bytes) */
3397 } s;
3398} IEMGATHERARGS;
3399AssertCompileSize(IEMGATHERARGS, sizeof(uint32_t));
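
/* Illustrative sketch (not part of the source): packing the operands of a gather instruction
 * into IEMGATHERARGS before handing them to iemCImpl_vpgather_worker_xx; all field values
 * below are made up for illustration. */
#if 0
static uint32_t iemExamplePackGatherArgs(void)
{
    IEMGATHERARGS Args;
    Args.u                = 0;
    Args.s.iYRegDst       = 1;               /* destination XMM/YMM register */
    Args.s.iYRegIdc       = 2;               /* index register */
    Args.s.iYRegMsk       = 3;               /* mask register */
    Args.s.iGRegBase      = X86_GREG_xBX;    /* base general register */
    Args.s.iScale         = 2;               /* scale field as encoded in the SIB byte */
    Args.s.enmEffOpSize   = IEMMODE_64BIT;
    Args.s.enmEffAddrMode = IEMMODE_64BIT;
    Args.s.iEffSeg        = X86_SREG_DS;
    Args.s.fVex256        = 1;               /* 256-bit (YMM) form */
    Args.s.fIdxQword      = 0;               /* 32-bit indices */
    Args.s.fValQword      = 0;               /* 32-bit values */
    return Args.u;                           /* becomes the u32PackedArgs argument */
}
#endif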
3400
3401/** @} */
3402
3403/** @name IEMAllCImplStrInstr.cpp.h
3404 * @note sed -e '/IEM_CIMPL_DEF_/!d' -e 's/IEM_CIMPL_DEF_/IEM_CIMPL_PROTO_/' -e 's/$/;/' -e 's/RT_CONCAT4(//' \
3405 * -e 's/,ADDR_SIZE)/64/g' -e 's/,OP_SIZE,/64/g' -e 's/,OP_rAX,/rax/g' IEMAllCImplStrInstr.cpp.h
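 *       Naming scheme: each worker is instantiated per operand size (op8/op16/op32/op64, or
 *       the al/ax/eax/rax accumulator for LODS/STOS/SCAS) and per address size (addr16/addr32/
 *       addr64, resp. m16/m32/m64), so e.g. REP MOVSD executed with a 32-bit address size ends
 *       up in iemCImpl_rep_movs_op32_addr32.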
3406 * @{ */
3407IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr16, uint8_t, iEffSeg);
3408IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr16, uint8_t, iEffSeg);
3409IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m16);
3410IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m16);
3411IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr16, uint8_t, iEffSeg);
3412IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m16);
3413IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m16, int8_t, iEffSeg);
3414IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr16, bool, fIoChecked);
3415IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr16, bool, fIoChecked);
3416IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3417IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3418
3419IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr16, uint8_t, iEffSeg);
3420IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr16, uint8_t, iEffSeg);
3421IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m16);
3422IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m16);
3423IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr16, uint8_t, iEffSeg);
3424IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m16);
3425IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m16, int8_t, iEffSeg);
3426IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr16, bool, fIoChecked);
3427IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr16, bool, fIoChecked);
3428IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3429IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3430
3431IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr16, uint8_t, iEffSeg);
3432IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr16, uint8_t, iEffSeg);
3433IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m16);
3434IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m16);
3435IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr16, uint8_t, iEffSeg);
3436IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m16);
3437IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m16, int8_t, iEffSeg);
3438IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr16, bool, fIoChecked);
3439IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr16, bool, fIoChecked);
3440IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3441IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3442
3443
3444IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr32, uint8_t, iEffSeg);
3445IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr32, uint8_t, iEffSeg);
3446IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m32);
3447IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m32);
3448IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr32, uint8_t, iEffSeg);
3449IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m32);
3450IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m32, int8_t, iEffSeg);
3451IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr32, bool, fIoChecked);
3452IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr32, bool, fIoChecked);
3453IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3454IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3455
3456IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr32, uint8_t, iEffSeg);
3457IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr32, uint8_t, iEffSeg);
3458IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m32);
3459IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m32);
3460IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr32, uint8_t, iEffSeg);
3461IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m32);
3462IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m32, int8_t, iEffSeg);
3463IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr32, bool, fIoChecked);
3464IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr32, bool, fIoChecked);
3465IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3466IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3467
3468IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr32, uint8_t, iEffSeg);
3469IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr32, uint8_t, iEffSeg);
3470IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m32);
3471IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m32);
3472IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr32, uint8_t, iEffSeg);
3473IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m32);
3474IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m32, int8_t, iEffSeg);
3475IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr32, bool, fIoChecked);
3476IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr32, bool, fIoChecked);
3477IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3478IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3479
3480IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op64_addr32, uint8_t, iEffSeg);
3481IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op64_addr32, uint8_t, iEffSeg);
3482IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_rax_m32);
3483IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_rax_m32);
3484IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op64_addr32, uint8_t, iEffSeg);
3485IEM_CIMPL_PROTO_0(iemCImpl_stos_rax_m32);
3486IEM_CIMPL_PROTO_1(iemCImpl_lods_rax_m32, int8_t, iEffSeg);
3487IEM_CIMPL_PROTO_1(iemCImpl_ins_op64_addr32, bool, fIoChecked);
3488IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op64_addr32, bool, fIoChecked);
3489IEM_CIMPL_PROTO_2(iemCImpl_outs_op64_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3490IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op64_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3491
3492
3493IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr64, uint8_t, iEffSeg);
3494IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr64, uint8_t, iEffSeg);
3495IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m64);
3496IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m64);
3497IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr64, uint8_t, iEffSeg);
3498IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m64);
3499IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m64, int8_t, iEffSeg);
3500IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr64, bool, fIoChecked);
3501IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr64, bool, fIoChecked);
3502IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3503IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3504
3505IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr64, uint8_t, iEffSeg);
3506IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr64, uint8_t, iEffSeg);
3507IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m64);
3508IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m64);
3509IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr64, uint8_t, iEffSeg);
3510IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m64);
3511IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m64, int8_t, iEffSeg);
3512IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr64, bool, fIoChecked);
3513IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr64, bool, fIoChecked);
3514IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3515IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3516
3517IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr64, uint8_t, iEffSeg);
3518IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr64, uint8_t, iEffSeg);
3519IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m64);
3520IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m64);
3521IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr64, uint8_t, iEffSeg);
3522IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m64);
3523IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m64, int8_t, iEffSeg);
3524IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr64, bool, fIoChecked);
3525IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr64, bool, fIoChecked);
3526IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3527IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3528
3529IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op64_addr64, uint8_t, iEffSeg);
3530IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op64_addr64, uint8_t, iEffSeg);
3531IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_rax_m64);
3532IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_rax_m64);
3533IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op64_addr64, uint8_t, iEffSeg);
3534IEM_CIMPL_PROTO_0(iemCImpl_stos_rax_m64);
3535IEM_CIMPL_PROTO_1(iemCImpl_lods_rax_m64, int8_t, iEffSeg);
3536IEM_CIMPL_PROTO_1(iemCImpl_ins_op64_addr64, bool, fIoChecked);
3537IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op64_addr64, bool, fIoChecked);
3538IEM_CIMPL_PROTO_2(iemCImpl_outs_op64_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3539IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op64_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3540/** @} */
3541
3542#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
3543VBOXSTRICTRC iemVmxVmexit(PVMCPUCC pVCpu, uint32_t uExitReason, uint64_t u64ExitQual) RT_NOEXCEPT;
3544VBOXSTRICTRC iemVmxVmexitInstr(PVMCPUCC pVCpu, uint32_t uExitReason, uint8_t cbInstr) RT_NOEXCEPT;
3545VBOXSTRICTRC iemVmxVmexitInstrNeedsInfo(PVMCPUCC pVCpu, uint32_t uExitReason, VMXINSTRID uInstrId, uint8_t cbInstr) RT_NOEXCEPT;
3546VBOXSTRICTRC iemVmxVmexitTaskSwitch(PVMCPUCC pVCpu, IEMTASKSWITCH enmTaskSwitch, RTSEL SelNewTss, uint8_t cbInstr) RT_NOEXCEPT;
3547VBOXSTRICTRC iemVmxVmexitEvent(PVMCPUCC pVCpu, uint8_t uVector, uint32_t fFlags, uint32_t uErrCode, uint64_t uCr2, uint8_t cbInstr) RT_NOEXCEPT;
3548VBOXSTRICTRC iemVmxVmexitEventDoubleFault(PVMCPUCC pVCpu) RT_NOEXCEPT;
3549VBOXSTRICTRC iemVmxVmexitEpt(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint32_t fAccess, uint32_t fSlatFail, uint8_t cbInstr) RT_NOEXCEPT;
3550VBOXSTRICTRC iemVmxVmexitPreemptTimer(PVMCPUCC pVCpu) RT_NOEXCEPT;
3551VBOXSTRICTRC iemVmxVmexitInstrMwait(PVMCPUCC pVCpu, bool fMonitorHwArmed, uint8_t cbInstr) RT_NOEXCEPT;
3552VBOXSTRICTRC iemVmxVmexitInstrIo(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint16_t u16Port,
3553 bool fImm, uint8_t cbAccess, uint8_t cbInstr) RT_NOEXCEPT;
3554VBOXSTRICTRC iemVmxVmexitInstrStrIo(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint16_t u16Port, uint8_t cbAccess,
3555 bool fRep, VMXEXITINSTRINFO ExitInstrInfo, uint8_t cbInstr) RT_NOEXCEPT;
3556VBOXSTRICTRC iemVmxVmexitInstrMovDrX(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint8_t iDrReg, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3557VBOXSTRICTRC iemVmxVmexitInstrMovToCr8(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3558VBOXSTRICTRC iemVmxVmexitInstrMovFromCr8(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3559VBOXSTRICTRC iemVmxVmexitInstrMovToCr3(PVMCPUCC pVCpu, uint64_t uNewCr3, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3560VBOXSTRICTRC iemVmxVmexitInstrMovFromCr3(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3561VBOXSTRICTRC iemVmxVmexitInstrMovToCr0Cr4(PVMCPUCC pVCpu, uint8_t iCrReg, uint64_t *puNewCrX, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3562VBOXSTRICTRC iemVmxVmexitInstrClts(PVMCPUCC pVCpu, uint8_t cbInstr) RT_NOEXCEPT;
3563VBOXSTRICTRC iemVmxVmexitInstrLmsw(PVMCPUCC pVCpu, uint32_t uGuestCr0, uint16_t *pu16NewMsw,
3564 RTGCPTR GCPtrEffDst, uint8_t cbInstr) RT_NOEXCEPT;
3565VBOXSTRICTRC iemVmxVmexitInstrInvlpg(PVMCPUCC pVCpu, RTGCPTR GCPtrPage, uint8_t cbInstr) RT_NOEXCEPT;
3566VBOXSTRICTRC iemVmxApicWriteEmulation(PVMCPUCC pVCpu) RT_NOEXCEPT;
3567VBOXSTRICTRC iemVmxVirtApicAccessUnused(PVMCPUCC pVCpu, PRTGCPHYS pGCPhysAccess, size_t cbAccess, uint32_t fAccess) RT_NOEXCEPT;
3568uint32_t iemVmxVirtApicReadRaw32(PVMCPUCC pVCpu, uint16_t offReg) RT_NOEXCEPT;
3569void iemVmxVirtApicWriteRaw32(PVMCPUCC pVCpu, uint16_t offReg, uint32_t uReg) RT_NOEXCEPT;
3570VBOXSTRICTRC iemVmxInvvpid(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPTR GCPtrInvvpidDesc,
3571 uint64_t u64InvvpidType, PCVMXVEXITINFO pExitInfo) RT_NOEXCEPT;
3572bool iemVmxIsRdmsrWrmsrInterceptSet(PCVMCPU pVCpu, uint32_t uExitReason, uint32_t idMsr) RT_NOEXCEPT;
3573IEM_CIMPL_PROTO_0(iemCImpl_vmxoff);
3574IEM_CIMPL_PROTO_2(iemCImpl_vmxon, uint8_t, iEffSeg, RTGCPTR, GCPtrVmxon);
3575IEM_CIMPL_PROTO_0(iemCImpl_vmlaunch);
3576IEM_CIMPL_PROTO_0(iemCImpl_vmresume);
3577IEM_CIMPL_PROTO_2(iemCImpl_vmptrld, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
3578IEM_CIMPL_PROTO_2(iemCImpl_vmptrst, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
3579IEM_CIMPL_PROTO_2(iemCImpl_vmclear, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
3580IEM_CIMPL_PROTO_2(iemCImpl_vmwrite_reg, uint64_t, u64Val, uint64_t, u64VmcsField);
3581IEM_CIMPL_PROTO_3(iemCImpl_vmwrite_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrVal, uint32_t, u64VmcsField);
3582IEM_CIMPL_PROTO_2(iemCImpl_vmread_reg64, uint64_t *, pu64Dst, uint64_t, u64VmcsField);
3583IEM_CIMPL_PROTO_2(iemCImpl_vmread_reg32, uint64_t *, pu32Dst, uint32_t, u32VmcsField);
3584IEM_CIMPL_PROTO_3(iemCImpl_vmread_mem_reg64, uint8_t, iEffSeg, RTGCPTR, GCPtrDst, uint32_t, u64VmcsField);
3585IEM_CIMPL_PROTO_3(iemCImpl_vmread_mem_reg32, uint8_t, iEffSeg, RTGCPTR, GCPtrDst, uint32_t, u32VmcsField);
3586IEM_CIMPL_PROTO_3(iemCImpl_invvpid, uint8_t, iEffSeg, RTGCPTR, GCPtrInvvpidDesc, uint64_t, uInvvpidType);
3587IEM_CIMPL_PROTO_3(iemCImpl_invept, uint8_t, iEffSeg, RTGCPTR, GCPtrInveptDesc, uint64_t, uInveptType);
3588IEM_CIMPL_PROTO_0(iemCImpl_vmx_pause);
3589#endif
3590
3591#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
3592VBOXSTRICTRC iemSvmVmexit(PVMCPUCC pVCpu, uint64_t uExitCode, uint64_t uExitInfo1, uint64_t uExitInfo2) RT_NOEXCEPT;
3593VBOXSTRICTRC iemHandleSvmEventIntercept(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector, uint32_t fFlags, uint32_t uErr, uint64_t uCr2) RT_NOEXCEPT;
3594VBOXSTRICTRC iemSvmHandleIOIntercept(PVMCPUCC pVCpu, uint16_t u16Port, SVMIOIOTYPE enmIoType, uint8_t cbReg,
3595 uint8_t cAddrSizeBits, uint8_t iEffSeg, bool fRep, bool fStrIo, uint8_t cbInstr) RT_NOEXCEPT;
3596VBOXSTRICTRC iemSvmHandleMsrIntercept(PVMCPUCC pVCpu, uint32_t idMsr, bool fWrite, uint8_t cbInstr) RT_NOEXCEPT;
3597IEM_CIMPL_PROTO_0(iemCImpl_vmrun);
3598IEM_CIMPL_PROTO_0(iemCImpl_vmload);
3599IEM_CIMPL_PROTO_0(iemCImpl_vmsave);
3600IEM_CIMPL_PROTO_0(iemCImpl_clgi);
3601IEM_CIMPL_PROTO_0(iemCImpl_stgi);
3602IEM_CIMPL_PROTO_0(iemCImpl_invlpga);
3603IEM_CIMPL_PROTO_0(iemCImpl_skinit);
3604IEM_CIMPL_PROTO_0(iemCImpl_svm_pause);
3605#endif
3606
3607IEM_CIMPL_PROTO_0(iemCImpl_vmcall); /* vmx */
3608IEM_CIMPL_PROTO_0(iemCImpl_vmmcall); /* svm */
3609IEM_CIMPL_PROTO_1(iemCImpl_Hypercall, uint16_t, uDisOpcode); /* both */
3610
3611extern const PFNIEMOP g_apfnIemInterpretOnlyOneByteMap[256];
3612extern const PFNIEMOP g_apfnIemInterpretOnlyTwoByteMap[1024];
3613extern const PFNIEMOP g_apfnIemInterpretOnlyThreeByte0f3a[1024];
3614extern const PFNIEMOP g_apfnIemInterpretOnlyThreeByte0f38[1024];
3615extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap1[1024];
3616extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap2[1024];
3617extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap3[1024];
3618
3619/*
3620 * Recompiler related stuff.
3621 */
3622extern const PFNIEMOP g_apfnIemThreadedRecompilerOneByteMap[256];
3623extern const PFNIEMOP g_apfnIemThreadedRecompilerTwoByteMap[1024];
3624extern const PFNIEMOP g_apfnIemThreadedRecompilerThreeByte0f3a[1024];
3625extern const PFNIEMOP g_apfnIemThreadedRecompilerThreeByte0f38[1024];
3626extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap1[1024];
3627extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap2[1024];
3628extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap3[1024];
3629
3630
3631IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_Nop);
3632IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_LogCpuState);
3633
3634IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_DeferToCImpl0);
3635
3636IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckIrq);
3637IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckTimers);
3638IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckTimersAndIrq);
3639IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckMode);
3640IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckHwInstrBps);
3641IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLim);
3642
3643IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodes);
3644IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodes);
3645IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesConsiderCsLim);
3646
3647/* Branching: */
3648IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndPcAndOpcodes);
3649IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckPcAndOpcodes);
3650IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckPcAndOpcodesConsiderCsLim);
3651
3652IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesLoadingTlb);
3653IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesLoadingTlb);
3654IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesLoadingTlbConsiderCsLim);
3655
3656/* Natural page crossing: */
3657IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesAcrossPageLoadingTlb);
3658IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesAcrossPageLoadingTlb);
3659IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesAcrossPageLoadingTlbConsiderCsLim);
3660
3661IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesOnNextPageLoadingTlb);
3662IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNextPageLoadingTlb);
3663IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNextPageLoadingTlbConsiderCsLim);
3664
3665IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesOnNewPageLoadingTlb);
3666IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNewPageLoadingTlb);
3667IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNewPageLoadingTlbConsiderCsLim);
3668
3669IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_Jump);
3670
3671bool iemThreadedCompileEmitIrqCheckBefore(PVMCPUCC pVCpu, PIEMTB pTb);
3672bool iemThreadedCompileBeginEmitCallsComplications(PVMCPUCC pVCpu, PIEMTB pTb);
3673#ifdef IEM_WITH_INTRA_TB_JUMPS
3674DECLHIDDEN(int) iemThreadedCompileBackAtFirstInstruction(PVMCPU pVCpu, PIEMTB pTb) RT_NOEXCEPT;
3675#endif
3676
3677
3678/** @} */
3679
3680RT_C_DECLS_END
3681
3682
3683#endif /* !VMM_INCLUDED_SRC_VMMAll_target_x86_IEMInternal_x86_h */
3684