source: vbox/trunk/src/VBox/VMM/VMMAll/target-x86/IEMInternal-x86.h

Last change on this file was 108791, checked in by vboxsync on 2025-03-28

VMM/IEM: More ARM target work. jiraref:VBP-1598

1/* $Id: IEMInternal-x86.h 108791 2025-03-28 21:58:31Z vboxsync $ */
2/** @file
3 * IEM - Internal header file, x86 target specifics.
4 */
5
6/*
7 * Copyright (C) 2011-2024 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28#ifndef VMM_INCLUDED_SRC_VMMAll_target_x86_IEMInternal_x86_h
29#define VMM_INCLUDED_SRC_VMMAll_target_x86_IEMInternal_x86_h
30#ifndef RT_WITHOUT_PRAGMA_ONCE
31# pragma once
32#endif
33
34
35RT_C_DECLS_BEGIN
36
37
38/** @defgroup grp_iem_int_x86 X86 Target Internals
39 * @ingroup grp_iem_int
40 * @internal
41 * @{
42 */
43
44VBOXSTRICTRC iemOpcodeFetchPrefetch(PVMCPUCC pVCpu) RT_NOEXCEPT;
45
46
47/** @name Prefix constants (IEMCPU::fPrefixes)
48 * @note x86 specific
49 * @{ */
50#define IEM_OP_PRF_SEG_CS RT_BIT_32(0) /**< CS segment prefix (0x2e). */
51#define IEM_OP_PRF_SEG_SS RT_BIT_32(1) /**< SS segment prefix (0x36). */
52#define IEM_OP_PRF_SEG_DS RT_BIT_32(2) /**< DS segment prefix (0x3e). */
53#define IEM_OP_PRF_SEG_ES RT_BIT_32(3) /**< ES segment prefix (0x26). */
54#define IEM_OP_PRF_SEG_FS RT_BIT_32(4) /**< FS segment prefix (0x64). */
55#define IEM_OP_PRF_SEG_GS RT_BIT_32(5) /**< GS segment prefix (0x65). */
56#define IEM_OP_PRF_SEG_MASK UINT32_C(0x3f)
57
58#define IEM_OP_PRF_SIZE_OP RT_BIT_32(8) /**< Operand size prefix (0x66). */
59#define IEM_OP_PRF_SIZE_REX_W RT_BIT_32(9) /**< REX.W prefix (0x48-0x4f). */
60#define IEM_OP_PRF_SIZE_ADDR RT_BIT_32(10) /**< Address size prefix (0x67). */
61
62#define IEM_OP_PRF_LOCK RT_BIT_32(16) /**< Lock prefix (0xf0). */
63#define IEM_OP_PRF_REPNZ RT_BIT_32(17) /**< Repeat-not-zero prefix (0xf2). */
64#define IEM_OP_PRF_REPZ RT_BIT_32(18) /**< Repeat-if-zero prefix (0xf3). */
65
66#define IEM_OP_PRF_REX RT_BIT_32(24) /**< Any REX prefix (0x40-0x4f). */
67#define IEM_OP_PRF_REX_B RT_BIT_32(25) /**< REX.B prefix (0x41,0x43,0x45,0x47,0x49,0x4b,0x4d,0x4f). */
68#define IEM_OP_PRF_REX_X RT_BIT_32(26) /**< REX.X prefix (0x42,0x43,0x46,0x47,0x4a,0x4b,0x4e,0x4f). */
69#define IEM_OP_PRF_REX_R RT_BIT_32(27) /**< REX.R prefix (0x44,0x45,0x46,0x47,0x4c,0x4d,0x4e,0x4f). */
70/** Mask with all the REX prefix flags.
71 * This is generally for use when needing to undo the REX prefixes when they
72 * are followed by legacy prefixes and therefore do not immediately precede
73 * the first opcode byte.
74 * For testing whether any REX prefix is present, use IEM_OP_PRF_REX instead. */
75#define IEM_OP_PRF_REX_MASK (IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W )
76
77#define IEM_OP_PRF_VEX RT_BIT_32(28) /**< Indicates VEX prefix. */
78#define IEM_OP_PRF_EVEX RT_BIT_32(29) /**< Indicates EVEX prefix. */
79#define IEM_OP_PRF_XOP RT_BIT_32(30) /**< Indicates XOP prefix. */
80/** @} */
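
/* Illustrative sketch (editorial addition, not part of the original header):
 * how the IEM_OP_PRF_XXX flags above are typically accumulated and tested
 * during prefix decoding.  The local fPrefixes stands in for IEMCPU::fPrefixes;
 * the surrounding decoder state is assumed.
 *
 *     uint32_t fPrefixes = 0;
 *     fPrefixes |= IEM_OP_PRF_SIZE_OP;                     // saw 0x66
 *     fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W; // saw 0x48..0x4f
 *     if (fPrefixes & IEM_OP_PRF_SEG_MASK)
 *     {   // one of the CS/SS/DS/ES/FS/GS overrides was decoded
 *     }
 *     if (fPrefixes & IEM_OP_PRF_LOCK)
 *     {   // LOCK prefix present
 *     }
 *     fPrefixes &= ~IEM_OP_PRF_REX_MASK;  // undo REX when legacy prefixes follow it
 */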
81
82/** @name IEMOPFORM_XXX - Opcode forms
83 * @note These are ORed together with IEMOPHINT_XXX.
84 * @note x86 specific
85 * @{ */
86/** ModR/M: reg, r/m */
87#define IEMOPFORM_RM 0
88/** ModR/M: reg, r/m (register) */
89#define IEMOPFORM_RM_REG (IEMOPFORM_RM | IEMOPFORM_MOD3)
90/** ModR/M: reg, r/m (memory) */
91#define IEMOPFORM_RM_MEM (IEMOPFORM_RM | IEMOPFORM_NOT_MOD3)
92/** ModR/M: reg, r/m, imm */
93#define IEMOPFORM_RMI 1
94/** ModR/M: reg, r/m (register), imm */
95#define IEMOPFORM_RMI_REG (IEMOPFORM_RMI | IEMOPFORM_MOD3)
96/** ModR/M: reg, r/m (memory), imm */
97#define IEMOPFORM_RMI_MEM (IEMOPFORM_RMI | IEMOPFORM_NOT_MOD3)
98/** ModR/M: reg, r/m, xmm0 */
99#define IEMOPFORM_RM0 2
100/** ModR/M: reg, r/m (register), xmm0 */
101#define IEMOPFORM_RM0_REG (IEMOPFORM_RM0 | IEMOPFORM_MOD3)
102/** ModR/M: reg, r/m (memory), xmm0 */
103#define IEMOPFORM_RM0_MEM (IEMOPFORM_RM0 | IEMOPFORM_NOT_MOD3)
104/** ModR/M: r/m, reg */
105#define IEMOPFORM_MR 3
106/** ModR/M: r/m (register), reg */
107#define IEMOPFORM_MR_REG (IEMOPFORM_MR | IEMOPFORM_MOD3)
108/** ModR/M: r/m (memory), reg */
109#define IEMOPFORM_MR_MEM (IEMOPFORM_MR | IEMOPFORM_NOT_MOD3)
110/** ModR/M: r/m, reg, imm */
111#define IEMOPFORM_MRI 4
112/** ModR/M: r/m (register), reg, imm */
113#define IEMOPFORM_MRI_REG (IEMOPFORM_MRI | IEMOPFORM_MOD3)
114/** ModR/M: r/m (memory), reg, imm */
115#define IEMOPFORM_MRI_MEM (IEMOPFORM_MRI | IEMOPFORM_NOT_MOD3)
116/** ModR/M: r/m only */
117#define IEMOPFORM_M 5
118/** ModR/M: r/m only (register). */
119#define IEMOPFORM_M_REG (IEMOPFORM_M | IEMOPFORM_MOD3)
120/** ModR/M: r/m only (memory). */
121#define IEMOPFORM_M_MEM (IEMOPFORM_M | IEMOPFORM_NOT_MOD3)
122/** ModR/M: r/m, imm */
123#define IEMOPFORM_MI 6
124/** ModR/M: r/m (register), imm */
125#define IEMOPFORM_MI_REG (IEMOPFORM_MI | IEMOPFORM_MOD3)
126/** ModR/M: r/m (memory), imm */
127#define IEMOPFORM_MI_MEM (IEMOPFORM_MI | IEMOPFORM_NOT_MOD3)
128/** ModR/M: r/m, 1 (shift and rotate instructions) */
129#define IEMOPFORM_M1 7
130/** ModR/M: r/m (register), 1. */
131#define IEMOPFORM_M1_REG (IEMOPFORM_M1 | IEMOPFORM_MOD3)
132/** ModR/M: r/m (memory), 1. */
133#define IEMOPFORM_M1_MEM (IEMOPFORM_M1 | IEMOPFORM_NOT_MOD3)
134/** ModR/M: r/m, CL (shift and rotate instructions)
135 * @todo This should just have been a generic fixed register, but the python
136 * code needs more convincing. */
137#define IEMOPFORM_M_CL 8
138/** ModR/M: r/m (register), CL. */
139#define IEMOPFORM_M_CL_REG (IEMOPFORM_M_CL | IEMOPFORM_MOD3)
140/** ModR/M: r/m (memory), CL. */
141#define IEMOPFORM_M_CL_MEM (IEMOPFORM_M_CL | IEMOPFORM_NOT_MOD3)
142/** ModR/M: reg only */
143#define IEMOPFORM_R 9
144
145/** VEX+ModR/M: reg, r/m */
146#define IEMOPFORM_VEX_RM 16
147/** VEX+ModR/M: reg, r/m (register) */
148#define IEMOPFORM_VEX_RM_REG (IEMOPFORM_VEX_RM | IEMOPFORM_MOD3)
149/** VEX+ModR/M: reg, r/m (memory) */
150#define IEMOPFORM_VEX_RM_MEM (IEMOPFORM_VEX_RM | IEMOPFORM_NOT_MOD3)
151/** VEX+ModR/M: r/m, reg */
152#define IEMOPFORM_VEX_MR 17
153/** VEX+ModR/M: r/m (register), reg */
154#define IEMOPFORM_VEX_MR_REG (IEMOPFORM_VEX_MR | IEMOPFORM_MOD3)
155/** VEX+ModR/M: r/m (memory), reg */
156#define IEMOPFORM_VEX_MR_MEM (IEMOPFORM_VEX_MR | IEMOPFORM_NOT_MOD3)
157/** VEX+ModR/M: r/m, reg, imm8 */
158#define IEMOPFORM_VEX_MRI 18
159/** VEX+ModR/M: r/m (register), reg, imm8 */
160#define IEMOPFORM_VEX_MRI_REG (IEMOPFORM_VEX_MRI | IEMOPFORM_MOD3)
161/** VEX+ModR/M: r/m (memory), reg, imm8 */
162#define IEMOPFORM_VEX_MRI_MEM (IEMOPFORM_VEX_MRI | IEMOPFORM_NOT_MOD3)
163/** VEX+ModR/M: r/m only */
164#define IEMOPFORM_VEX_M 19
165/** VEX+ModR/M: r/m only (register). */
166#define IEMOPFORM_VEX_M_REG (IEMOPFORM_VEX_M | IEMOPFORM_MOD3)
167/** VEX+ModR/M: r/m only (memory). */
168#define IEMOPFORM_VEX_M_MEM (IEMOPFORM_VEX_M | IEMOPFORM_NOT_MOD3)
169/** VEX+ModR/M: reg only */
170#define IEMOPFORM_VEX_R 20
171/** VEX+ModR/M: reg, vvvv, r/m */
172#define IEMOPFORM_VEX_RVM 21
173/** VEX+ModR/M: reg, vvvv, r/m (register). */
174#define IEMOPFORM_VEX_RVM_REG (IEMOPFORM_VEX_RVM | IEMOPFORM_MOD3)
175/** VEX+ModR/M: reg, vvvv, r/m (memory). */
176#define IEMOPFORM_VEX_RVM_MEM (IEMOPFORM_VEX_RVM | IEMOPFORM_NOT_MOD3)
177/** VEX+ModR/M: reg, vvvv, r/m, imm */
178#define IEMOPFORM_VEX_RVMI 22
179/** VEX+ModR/M: reg, vvvv, r/m (register), imm. */
180#define IEMOPFORM_VEX_RVMI_REG (IEMOPFORM_VEX_RVMI | IEMOPFORM_MOD3)
181/** VEX+ModR/M: reg, vvvv, r/m (memory), imm. */
182#define IEMOPFORM_VEX_RVMI_MEM (IEMOPFORM_VEX_RVMI | IEMOPFORM_NOT_MOD3)
183/** VEX+ModR/M: reg, vvvv, r/m, imm(reg) */
184#define IEMOPFORM_VEX_RVMR 23
185/** VEX+ModR/M: reg, vvvv, r/m (register), imm(reg). */
186#define IEMOPFORM_VEX_RVMR_REG (IEMOPFORM_VEX_RVMR | IEMOPFORM_MOD3)
187/** VEX+ModR/M: reg, vvvv, r/m (memory), imm(reg). */
188#define IEMOPFORM_VEX_RVMR_MEM (IEMOPFORM_VEX_RVMR | IEMOPFORM_NOT_MOD3)
189/** VEX+ModR/M: reg, r/m, vvvv */
190#define IEMOPFORM_VEX_RMV 24
191/** VEX+ModR/M: reg, r/m, vvvv (register). */
192#define IEMOPFORM_VEX_RMV_REG (IEMOPFORM_VEX_RMV | IEMOPFORM_MOD3)
193/** VEX+ModR/M: reg, r/m, vvvv (memory). */
194#define IEMOPFORM_VEX_RMV_MEM (IEMOPFORM_VEX_RMV | IEMOPFORM_NOT_MOD3)
195/** VEX+ModR/M: reg, r/m, imm8 */
196#define IEMOPFORM_VEX_RMI 25
197/** VEX+ModR/M: reg, r/m, imm8 (register). */
198#define IEMOPFORM_VEX_RMI_REG (IEMOPFORM_VEX_RMI | IEMOPFORM_MOD3)
199/** VEX+ModR/M: reg, r/m, imm8 (memory). */
200#define IEMOPFORM_VEX_RMI_MEM (IEMOPFORM_VEX_RMI | IEMOPFORM_NOT_MOD3)
201/** VEX+ModR/M: r/m, vvvv, reg */
202#define IEMOPFORM_VEX_MVR 26
203/** VEX+ModR/M: r/m, vvvv, reg (register) */
204#define IEMOPFORM_VEX_MVR_REG (IEMOPFORM_VEX_MVR | IEMOPFORM_MOD3)
205/** VEX+ModR/M: r/m, vvvv, reg (memory) */
206#define IEMOPFORM_VEX_MVR_MEM (IEMOPFORM_VEX_MVR | IEMOPFORM_NOT_MOD3)
207/** VEX+ModR/M+/n: vvvv, r/m */
208#define IEMOPFORM_VEX_VM 27
209/** VEX+ModR/M+/n: vvvv, r/m (register) */
210#define IEMOPFORM_VEX_VM_REG (IEMOPFORM_VEX_VM | IEMOPFORM_MOD3)
211/** VEX+ModR/M+/n: vvvv, r/m (memory) */
212#define IEMOPFORM_VEX_VM_MEM (IEMOPFORM_VEX_VM | IEMOPFORM_NOT_MOD3)
213/** VEX+ModR/M+/n: vvvv, r/m, imm8 */
214#define IEMOPFORM_VEX_VMI 28
215/** VEX+ModR/M+/n: vvvv, r/m, imm8 (register) */
216#define IEMOPFORM_VEX_VMI_REG (IEMOPFORM_VEX_VMI | IEMOPFORM_MOD3)
217/** VEX+ModR/M+/n: vvvv, r/m, imm8 (memory) */
218#define IEMOPFORM_VEX_VMI_MEM (IEMOPFORM_VEX_VMI | IEMOPFORM_NOT_MOD3)
219
220/** Fixed register instruction, no R/M. */
221#define IEMOPFORM_FIXED 32
222
223/** The r/m is a register. */
224#define IEMOPFORM_MOD3 RT_BIT_32(8)
225/** The r/m is a memory access. */
226#define IEMOPFORM_NOT_MOD3 RT_BIT_32(9)
227/** @} */
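
/* Illustrative sketch (editorial addition, not part of the original header):
 * the _REG/_MEM variants above are simply the base form value with the mod=3
 * or mod!=3 flag ORed in, so a decoder can split on the ModR/M mod field like
 * this.  The variable bRm (raw ModR/M byte) is an example name, not from the
 * header.
 *
 *     uint32_t fForm = IEMOPFORM_RM;
 *     if ((bRm & 0xc0) == 0xc0)            // ModR/M.mod == 3: register operand
 *         fForm |= IEMOPFORM_MOD3;         //   i.e. IEMOPFORM_RM_REG
 *     else
 *         fForm |= IEMOPFORM_NOT_MOD3;     //   i.e. IEMOPFORM_RM_MEM
 */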
228
229/** @name IEMOPHINT_XXX - Additional Opcode Hints
230 * @note These are ORed together with IEMOPFORM_XXX.
231 * @note x86 specific
232 * @{ */
233/** Ignores the operand size prefix (66h). */
234#define IEMOPHINT_IGNORES_OZ_PFX RT_BIT_32(10)
235/** Ignores REX.W (aka WIG). */
236#define IEMOPHINT_IGNORES_REXW RT_BIT_32(11)
237/** Both the operand size prefixes (66h + REX.W) are ignored. */
238#define IEMOPHINT_IGNORES_OP_SIZES (IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW)
239/** Allowed with the lock prefix. */
240#define IEMOPHINT_LOCK_ALLOWED RT_BIT_32(11)
241/** The VEX.L value is ignored (aka LIG). */
242#define IEMOPHINT_VEX_L_IGNORED RT_BIT_32(12)
243/** The VEX.L value must be zero (i.e. 128-bit width only). */
244#define IEMOPHINT_VEX_L_ZERO RT_BIT_32(13)
245/** The VEX.L value must be one (i.e. 256-bit width only). */
246#define IEMOPHINT_VEX_L_ONE RT_BIT_32(14)
247/** The VEX.V value must be zero. */
248#define IEMOPHINT_VEX_V_ZERO RT_BIT_32(15)
250/** The REX.W/VEX.W value must be zero. */
250#define IEMOPHINT_REX_W_ZERO RT_BIT_32(16)
251#define IEMOPHINT_VEX_W_ZERO IEMOPHINT_REX_W_ZERO
253/** The REX.W/VEX.W value must be one. */
253#define IEMOPHINT_REX_W_ONE RT_BIT_32(17)
254#define IEMOPHINT_VEX_W_ONE IEMOPHINT_REX_W_ONE
255
256/** Hint to IEMAllInstructionPython.py that this macro should be skipped. */
257#define IEMOPHINT_SKIP_PYTHON RT_BIT_32(31)
258/** @} */
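
/* Illustrative sketch (editorial addition, not part of the original header):
 * as the @note above says, forms and hints share one 32-bit value, so an
 * opcode entry ORs them together and later tests individual hint bits:
 *
 *     uint32_t const fOpInfo = IEMOPFORM_RM | IEMOPHINT_LOCK_ALLOWED;
 *     if (fOpInfo & IEMOPHINT_LOCK_ALLOWED)
 *     {   // the instruction may legally carry a LOCK prefix
 *     }
 */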
259
260/**
261 * Possible hardware task switch sources - iemTaskSwitch(), iemVmxVmexitTaskSwitch().
262 * @note x86 specific
263 */
264typedef enum IEMTASKSWITCH
265{
266 /** Task switch caused by an interrupt/exception. */
267 IEMTASKSWITCH_INT_XCPT = 1,
268 /** Task switch caused by a far CALL. */
269 IEMTASKSWITCH_CALL,
270 /** Task switch caused by a far JMP. */
271 IEMTASKSWITCH_JUMP,
272 /** Task switch caused by an IRET. */
273 IEMTASKSWITCH_IRET
274} IEMTASKSWITCH;
275AssertCompileSize(IEMTASKSWITCH, 4);
276
277/**
278 * Possible CrX load (write) sources - iemCImpl_load_CrX().
279 * @note x86 specific
280 */
281typedef enum IEMACCESSCRX
282{
283 /** CrX access caused by 'mov crX' instruction. */
284 IEMACCESSCRX_MOV_CRX,
285 /** CrX (CR0) write caused by 'lmsw' instruction. */
286 IEMACCESSCRX_LMSW,
287 /** CrX (CR0) write caused by 'clts' instruction. */
288 IEMACCESSCRX_CLTS,
289 /** CrX (CR0) read caused by 'smsw' instruction. */
290 IEMACCESSCRX_SMSW
291} IEMACCESSCRX;
292
293#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
294/** @name IEM_SLAT_FAIL_XXX - Second-level address translation failure information.
295 *
296 * These flags provide further context to SLAT page-walk failures that could not be
297 * determined by PGM (e.g., PGM is not privy to memory access permissions).
298 *
299 * @{
300 */
301/** Translating a nested-guest linear address failed accessing a nested-guest
302 * physical address. */
303# define IEM_SLAT_FAIL_LINEAR_TO_PHYS_ADDR RT_BIT_32(0)
304/** Translating a nested-guest linear address failed accessing a
305 * paging-structure entry or updating accessed/dirty bits. */
306# define IEM_SLAT_FAIL_LINEAR_TO_PAGE_TABLE RT_BIT_32(1)
307/** @} */
308
309DECLCALLBACK(FNPGMPHYSHANDLER) iemVmxApicAccessPageHandler;
310# ifndef IN_RING3
311DECLCALLBACK(FNPGMRZPHYSPFHANDLER) iemVmxApicAccessPagePfHandler;
312# endif
313#endif
314
315/**
316 * Indicates to the verifier that the given flag set is undefined.
317 *
318 * Can be invoked again to add more flags.
319 *
320 * This is a NOOP if the verifier isn't compiled in.
321 *
322 * @note We're temporarily keeping this until code is converted to new
323 * disassembler style opcode handling.
324 */
325#define IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fEfl) do { } while (0)
326
327
328/** Defined in IEMAllAImplC.cpp but also used by IEMAllAImplA.asm. */
329RT_C_DECLS_BEGIN
330extern uint8_t const g_afParity[256];
331RT_C_DECLS_END
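
/* Illustrative sketch (editorial addition, not part of the original header):
 * the usual way a parity lookup table like g_afParity is consumed when
 * assembling EFLAGS.  The assumption here is that each entry holds either
 * X86_EFL_PF or 0 for the corresponding byte value.
 *
 *     uint32_t fEFlags  = 0;
 *     uint8_t  u8Result = 0x03;            // two bits set => even parity
 *     fEFlags |= g_afParity[u8Result];     // would add X86_EFL_PF here
 */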
332
333
334/** @name Arithmetic assignment operations on bytes (binary).
335 * @{ */
336typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU8, (uint32_t fEFlagsIn, uint8_t *pu8Dst, uint8_t u8Src));
337typedef FNIEMAIMPLBINU8 *PFNIEMAIMPLBINU8;
338FNIEMAIMPLBINU8 iemAImpl_add_u8, iemAImpl_add_u8_locked;
339FNIEMAIMPLBINU8 iemAImpl_adc_u8, iemAImpl_adc_u8_locked;
340FNIEMAIMPLBINU8 iemAImpl_sub_u8, iemAImpl_sub_u8_locked;
341FNIEMAIMPLBINU8 iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked;
342FNIEMAIMPLBINU8 iemAImpl_or_u8, iemAImpl_or_u8_locked;
343FNIEMAIMPLBINU8 iemAImpl_xor_u8, iemAImpl_xor_u8_locked;
344FNIEMAIMPLBINU8 iemAImpl_and_u8, iemAImpl_and_u8_locked;
345/** @} */
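
/* Illustrative sketch (editorial addition, not part of the original header):
 * the calling convention suggested by the FNIEMAIMPLBINU8 shape above - the
 * incoming EFLAGS are passed by value, the destination is updated in place,
 * and the resulting EFLAGS are returned.
 *
 *     uint8_t  u8Dst   = 0x7f;
 *     uint32_t fEFlags = 0;
 *     fEFlags = iemAImpl_add_u8(fEFlags, &u8Dst, 1); // u8Dst == 0x80; OF/SF now set
 */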
346
347/** @name Arithmetic assignment operations on words (binary).
348 * @{ */
349typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU16, (uint32_t fEFlagsIn, uint16_t *pu16Dst, uint16_t u16Src));
350typedef FNIEMAIMPLBINU16 *PFNIEMAIMPLBINU16;
351FNIEMAIMPLBINU16 iemAImpl_add_u16, iemAImpl_add_u16_locked;
352FNIEMAIMPLBINU16 iemAImpl_adc_u16, iemAImpl_adc_u16_locked;
353FNIEMAIMPLBINU16 iemAImpl_sub_u16, iemAImpl_sub_u16_locked;
354FNIEMAIMPLBINU16 iemAImpl_sbb_u16, iemAImpl_sbb_u16_locked;
355FNIEMAIMPLBINU16 iemAImpl_or_u16, iemAImpl_or_u16_locked;
356FNIEMAIMPLBINU16 iemAImpl_xor_u16, iemAImpl_xor_u16_locked;
357FNIEMAIMPLBINU16 iemAImpl_and_u16, iemAImpl_and_u16_locked;
358/** @} */
359
360
361/** @name Arithmetic assignment operations on double words (binary).
362 * @{ */
363typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU32, (uint32_t fEFlagsIn, uint32_t *pu32Dst, uint32_t u32Src));
364typedef FNIEMAIMPLBINU32 *PFNIEMAIMPLBINU32;
365FNIEMAIMPLBINU32 iemAImpl_add_u32, iemAImpl_add_u32_locked;
366FNIEMAIMPLBINU32 iemAImpl_adc_u32, iemAImpl_adc_u32_locked;
367FNIEMAIMPLBINU32 iemAImpl_sub_u32, iemAImpl_sub_u32_locked;
368FNIEMAIMPLBINU32 iemAImpl_sbb_u32, iemAImpl_sbb_u32_locked;
369FNIEMAIMPLBINU32 iemAImpl_or_u32, iemAImpl_or_u32_locked;
370FNIEMAIMPLBINU32 iemAImpl_xor_u32, iemAImpl_xor_u32_locked;
371FNIEMAIMPLBINU32 iemAImpl_and_u32, iemAImpl_and_u32_locked;
372FNIEMAIMPLBINU32 iemAImpl_blsi_u32, iemAImpl_blsi_u32_fallback;
373FNIEMAIMPLBINU32 iemAImpl_blsr_u32, iemAImpl_blsr_u32_fallback;
374FNIEMAIMPLBINU32 iemAImpl_blsmsk_u32, iemAImpl_blsmsk_u32_fallback;
375/** @} */
376
377/** @name Arithmetic assignment operations on quad words (binary).
378 * @{ */
379typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU64, (uint32_t fEFlagsIn, uint64_t *pu64Dst, uint64_t u64Src));
380typedef FNIEMAIMPLBINU64 *PFNIEMAIMPLBINU64;
381FNIEMAIMPLBINU64 iemAImpl_add_u64, iemAImpl_add_u64_locked;
382FNIEMAIMPLBINU64 iemAImpl_adc_u64, iemAImpl_adc_u64_locked;
383FNIEMAIMPLBINU64 iemAImpl_sub_u64, iemAImpl_sub_u64_locked;
384FNIEMAIMPLBINU64 iemAImpl_sbb_u64, iemAImpl_sbb_u64_locked;
385FNIEMAIMPLBINU64 iemAImpl_or_u64, iemAImpl_or_u64_locked;
386FNIEMAIMPLBINU64 iemAImpl_xor_u64, iemAImpl_xor_u64_locked;
387FNIEMAIMPLBINU64 iemAImpl_and_u64, iemAImpl_and_u64_locked;
388FNIEMAIMPLBINU64 iemAImpl_blsi_u64, iemAImpl_blsi_u64_fallback;
389FNIEMAIMPLBINU64 iemAImpl_blsr_u64, iemAImpl_blsr_u64_fallback;
390FNIEMAIMPLBINU64 iemAImpl_blsmsk_u64, iemAImpl_blsmsk_u64_fallback;
391/** @} */
392
393typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU8, (uint32_t fEFlagsIn, uint8_t const *pu8Dst, uint8_t u8Src));
394typedef FNIEMAIMPLBINROU8 *PFNIEMAIMPLBINROU8;
395typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU16,(uint32_t fEFlagsIn, uint16_t const *pu16Dst, uint16_t u16Src));
396typedef FNIEMAIMPLBINROU16 *PFNIEMAIMPLBINROU16;
397typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU32,(uint32_t fEFlagsIn, uint32_t const *pu32Dst, uint32_t u32Src));
398typedef FNIEMAIMPLBINROU32 *PFNIEMAIMPLBINROU32;
399typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU64,(uint32_t fEFlagsIn, uint64_t const *pu64Dst, uint64_t u64Src));
400typedef FNIEMAIMPLBINROU64 *PFNIEMAIMPLBINROU64;
401
402/** @name Compare operations (thrown in with the binary ops).
403 * @{ */
404FNIEMAIMPLBINROU8 iemAImpl_cmp_u8;
405FNIEMAIMPLBINROU16 iemAImpl_cmp_u16;
406FNIEMAIMPLBINROU32 iemAImpl_cmp_u32;
407FNIEMAIMPLBINROU64 iemAImpl_cmp_u64;
408/** @} */
409
410/** @name Test operations (thrown in with the binary ops).
411 * @{ */
412FNIEMAIMPLBINROU8 iemAImpl_test_u8;
413FNIEMAIMPLBINROU16 iemAImpl_test_u16;
414FNIEMAIMPLBINROU32 iemAImpl_test_u32;
415FNIEMAIMPLBINROU64 iemAImpl_test_u64;
416/** @} */
417
418/** @name Bit operations (thrown in with the binary ops).
419 * @{ */
420FNIEMAIMPLBINROU16 iemAImpl_bt_u16;
421FNIEMAIMPLBINROU32 iemAImpl_bt_u32;
422FNIEMAIMPLBINROU64 iemAImpl_bt_u64;
423FNIEMAIMPLBINU16 iemAImpl_btc_u16, iemAImpl_btc_u16_locked;
424FNIEMAIMPLBINU32 iemAImpl_btc_u32, iemAImpl_btc_u32_locked;
425FNIEMAIMPLBINU64 iemAImpl_btc_u64, iemAImpl_btc_u64_locked;
426FNIEMAIMPLBINU16 iemAImpl_btr_u16, iemAImpl_btr_u16_locked;
427FNIEMAIMPLBINU32 iemAImpl_btr_u32, iemAImpl_btr_u32_locked;
428FNIEMAIMPLBINU64 iemAImpl_btr_u64, iemAImpl_btr_u64_locked;
429FNIEMAIMPLBINU16 iemAImpl_bts_u16, iemAImpl_bts_u16_locked;
430FNIEMAIMPLBINU32 iemAImpl_bts_u32, iemAImpl_bts_u32_locked;
431FNIEMAIMPLBINU64 iemAImpl_bts_u64, iemAImpl_bts_u64_locked;
432/** @} */
433
434/** @name Arithmetic three operand operations on double words (binary).
435 * @{ */
436typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU32, (uint32_t *pu32Dst, uint32_t u32Src1, uint32_t u32Src2, uint32_t *pEFlags));
437typedef FNIEMAIMPLBINVEXU32 *PFNIEMAIMPLBINVEXU32;
438FNIEMAIMPLBINVEXU32 iemAImpl_andn_u32, iemAImpl_andn_u32_fallback;
439FNIEMAIMPLBINVEXU32 iemAImpl_bextr_u32, iemAImpl_bextr_u32_fallback;
440FNIEMAIMPLBINVEXU32 iemAImpl_bzhi_u32, iemAImpl_bzhi_u32_fallback;
441/** @} */
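
/* Illustrative sketch (editorial addition, not part of the original header):
 * the VEX three-operand helpers write the result through pu32Dst and the
 * flags through pEFlags.  The operand roles are an assumption based on the
 * ANDN definition (dst = ~src1 & src2).
 *
 *     uint32_t uDst    = 0;
 *     uint32_t fEFlags = 0;
 *     iemAImpl_andn_u32(&uDst, UINT32_C(0xffff0000), UINT32_C(0x12345678), &fEFlags);
 *     // uDst == 0x00005678 under the stated operand-order assumption
 */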
442
443/** @name Arithmetic three operand operations on quad words (binary).
444 * @{ */
445typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU64, (uint64_t *pu64Dst, uint64_t u64Src1, uint64_t u64Src2, uint32_t *pEFlags));
446typedef FNIEMAIMPLBINVEXU64 *PFNIEMAIMPLBINVEXU64;
447FNIEMAIMPLBINVEXU64 iemAImpl_andn_u64, iemAImpl_andn_u64_fallback;
448FNIEMAIMPLBINVEXU64 iemAImpl_bextr_u64, iemAImpl_bextr_u64_fallback;
449FNIEMAIMPLBINVEXU64 iemAImpl_bzhi_u64, iemAImpl_bzhi_u64_fallback;
450/** @} */
451
452/** @name Arithmetic three operand operations on double words w/o EFLAGS (binary).
453 * @{ */
454typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU32NOEFL, (uint32_t *pu32Dst, uint32_t u32Src1, uint32_t u32Src2));
455typedef FNIEMAIMPLBINVEXU32NOEFL *PFNIEMAIMPLBINVEXU32NOEFL;
456FNIEMAIMPLBINVEXU32NOEFL iemAImpl_pdep_u32, iemAImpl_pdep_u32_fallback;
457FNIEMAIMPLBINVEXU32NOEFL iemAImpl_pext_u32, iemAImpl_pext_u32_fallback;
458FNIEMAIMPLBINVEXU32NOEFL iemAImpl_sarx_u32, iemAImpl_sarx_u32_fallback;
459FNIEMAIMPLBINVEXU32NOEFL iemAImpl_shlx_u32, iemAImpl_shlx_u32_fallback;
460FNIEMAIMPLBINVEXU32NOEFL iemAImpl_shrx_u32, iemAImpl_shrx_u32_fallback;
461FNIEMAIMPLBINVEXU32NOEFL iemAImpl_rorx_u32;
462/** @} */
463
464/** @name Arithmetic three operand operations on quad words w/o EFLAGS (binary).
465 * @{ */
466typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU64NOEFL, (uint64_t *pu64Dst, uint64_t u64Src1, uint64_t u64Src2));
467typedef FNIEMAIMPLBINVEXU64NOEFL *PFNIEMAIMPLBINVEXU64NOEFL;
468FNIEMAIMPLBINVEXU64NOEFL iemAImpl_pdep_u64, iemAImpl_pdep_u64_fallback;
469FNIEMAIMPLBINVEXU64NOEFL iemAImpl_pext_u64, iemAImpl_pext_u64_fallback;
470FNIEMAIMPLBINVEXU64NOEFL iemAImpl_sarx_u64, iemAImpl_sarx_u64_fallback;
471FNIEMAIMPLBINVEXU64NOEFL iemAImpl_shlx_u64, iemAImpl_shlx_u64_fallback;
472FNIEMAIMPLBINVEXU64NOEFL iemAImpl_shrx_u64, iemAImpl_shrx_u64_fallback;
473FNIEMAIMPLBINVEXU64NOEFL iemAImpl_rorx_u64;
474/** @} */
475
476/** @name MULX 32-bit and 64-bit.
477 * @{ */
478typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMULXVEXU32, (uint32_t *puDst1, uint32_t *puDst2, uint32_t uSrc1, uint32_t uSrc2));
479typedef FNIEMAIMPLMULXVEXU32 *PFNIEMAIMPLMULXVEXU32;
480FNIEMAIMPLMULXVEXU32 iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback;
481
482typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMULXVEXU64, (uint64_t *puDst1, uint64_t *puDst2, uint64_t uSrc1, uint64_t uSrc2));
483typedef FNIEMAIMPLMULXVEXU64 *PFNIEMAIMPLMULXVEXU64;
484FNIEMAIMPLMULXVEXU64 iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback;
485/** @} */
486
487
488/** @name Exchange memory with register operations.
489 * @{ */
490IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u8_locked, (uint8_t *pu8Mem, uint8_t *pu8Reg));
491IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u16_locked,(uint16_t *pu16Mem, uint16_t *pu16Reg));
492IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u32_locked,(uint32_t *pu32Mem, uint32_t *pu32Reg));
493IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u64_locked,(uint64_t *pu64Mem, uint64_t *pu64Reg));
494IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u8_unlocked, (uint8_t *pu8Mem, uint8_t *pu8Reg));
495IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u16_unlocked,(uint16_t *pu16Mem, uint16_t *pu16Reg));
496IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u32_unlocked,(uint32_t *pu32Mem, uint32_t *pu32Reg));
497IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u64_unlocked,(uint64_t *pu64Mem, uint64_t *pu64Reg));
498/** @} */
499
500/** @name Exchange and add operations.
501 * @{ */
502IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u8, (uint8_t *pu8Dst, uint8_t *pu8Reg, uint32_t *pEFlags));
503IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u16,(uint16_t *pu16Dst, uint16_t *pu16Reg, uint32_t *pEFlags));
504IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u32,(uint32_t *pu32Dst, uint32_t *pu32Reg, uint32_t *pEFlags));
505IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u64,(uint64_t *pu64Dst, uint64_t *pu64Reg, uint32_t *pEFlags));
506IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u8_locked, (uint8_t *pu8Dst, uint8_t *pu8Reg, uint32_t *pEFlags));
507IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u16_locked,(uint16_t *pu16Dst, uint16_t *pu16Reg, uint32_t *pEFlags));
508IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u32_locked,(uint32_t *pu32Dst, uint32_t *pu32Reg, uint32_t *pEFlags));
509IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u64_locked,(uint64_t *pu64Dst, uint64_t *pu64Reg, uint32_t *pEFlags));
510/** @} */
511
512/** @name Compare and exchange.
513 * @{ */
514IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u8, (uint8_t *pu8Dst, uint8_t *puAl, uint8_t uSrcReg, uint32_t *pEFlags));
515IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u8_locked, (uint8_t *pu8Dst, uint8_t *puAl, uint8_t uSrcReg, uint32_t *pEFlags));
516IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u16, (uint16_t *pu16Dst, uint16_t *puAx, uint16_t uSrcReg, uint32_t *pEFlags));
517IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u16_locked,(uint16_t *pu16Dst, uint16_t *puAx, uint16_t uSrcReg, uint32_t *pEFlags));
518IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u32, (uint32_t *pu32Dst, uint32_t *puEax, uint32_t uSrcReg, uint32_t *pEFlags));
519IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u32_locked,(uint32_t *pu32Dst, uint32_t *puEax, uint32_t uSrcReg, uint32_t *pEFlags));
520#if ARCH_BITS == 32
521IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64, (uint64_t *pu64Dst, uint64_t *puRax, uint64_t *puSrcReg, uint32_t *pEFlags));
522IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64_locked,(uint64_t *pu64Dst, uint64_t *puRax, uint64_t *puSrcReg, uint32_t *pEFlags));
523#else
524IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64, (uint64_t *pu64Dst, uint64_t *puRax, uint64_t uSrcReg, uint32_t *pEFlags));
525IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64_locked,(uint64_t *pu64Dst, uint64_t *puRax, uint64_t uSrcReg, uint32_t *pEFlags));
526#endif
527IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg8b,(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx,
528 uint32_t *pEFlags));
529IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg8b_locked,(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx,
530 uint32_t *pEFlags));
531IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx, PRTUINT128U pu128RbxRcx,
532 uint32_t *pEFlags));
533IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b_locked,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx, PRTUINT128U pu128RbxRcx,
534 uint32_t *pEFlags));
535#ifndef RT_ARCH_ARM64
536IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b_fallback,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx,
537 PRTUINT128U pu128RbxRcx, uint32_t *pEFlags));
538#endif
539/** @} */
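
/* Illustrative sketch (editorial addition, not part of the original header):
 * CMPXCHG semantics as exposed by the helpers above - the accumulator (puEax
 * here) is compared with the destination; on a match the source register
 * value is stored and ZF is set, otherwise the destination value is loaded
 * into the accumulator.
 *
 *     uint32_t u32Dst  = 42;
 *     uint32_t u32Eax  = 42;
 *     uint32_t fEFlags = 0;
 *     iemAImpl_cmpxchg_u32(&u32Dst, &u32Eax, 99, &fEFlags);
 *     // u32Dst == 99 and ZF is set in fEFlags, since the values matched
 */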
540
541/** @name Memory ordering
542 * @{ */
543typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEMFENCE,(void));
544typedef FNIEMAIMPLMEMFENCE *PFNIEMAIMPLMEMFENCE;
545IEM_DECL_IMPL_DEF(void, iemAImpl_mfence,(void));
546IEM_DECL_IMPL_DEF(void, iemAImpl_sfence,(void));
547IEM_DECL_IMPL_DEF(void, iemAImpl_lfence,(void));
548#ifndef RT_ARCH_ARM64
549IEM_DECL_IMPL_DEF(void, iemAImpl_alt_mem_fence,(void));
550#endif
551/** @} */
552
553/** @name Double precision shifts
554 * @{ */
555typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU16,(uint16_t *pu16Dst, uint16_t u16Src, uint8_t cShift, uint32_t *pEFlags));
556typedef FNIEMAIMPLSHIFTDBLU16 *PFNIEMAIMPLSHIFTDBLU16;
557typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU32,(uint32_t *pu32Dst, uint32_t u32Src, uint8_t cShift, uint32_t *pEFlags));
558typedef FNIEMAIMPLSHIFTDBLU32 *PFNIEMAIMPLSHIFTDBLU32;
559typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU64,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t cShift, uint32_t *pEFlags));
560typedef FNIEMAIMPLSHIFTDBLU64 *PFNIEMAIMPLSHIFTDBLU64;
561FNIEMAIMPLSHIFTDBLU16 iemAImpl_shld_u16, iemAImpl_shld_u16_amd, iemAImpl_shld_u16_intel;
562FNIEMAIMPLSHIFTDBLU32 iemAImpl_shld_u32, iemAImpl_shld_u32_amd, iemAImpl_shld_u32_intel;
563FNIEMAIMPLSHIFTDBLU64 iemAImpl_shld_u64, iemAImpl_shld_u64_amd, iemAImpl_shld_u64_intel;
564FNIEMAIMPLSHIFTDBLU16 iemAImpl_shrd_u16, iemAImpl_shrd_u16_amd, iemAImpl_shrd_u16_intel;
565FNIEMAIMPLSHIFTDBLU32 iemAImpl_shrd_u32, iemAImpl_shrd_u32_amd, iemAImpl_shrd_u32_intel;
566FNIEMAIMPLSHIFTDBLU64 iemAImpl_shrd_u64, iemAImpl_shrd_u64_amd, iemAImpl_shrd_u64_intel;
567/** @} */
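
/* Illustrative sketch (editorial addition, not part of the original header):
 * SHLD shifts the destination left and fills the vacated low bits from the
 * source operand, which is what the FNIEMAIMPLSHIFTDBLU16 shape expresses.
 *
 *     uint16_t u16Dst  = 0x1234;
 *     uint32_t fEFlags = 0;
 *     iemAImpl_shld_u16(&u16Dst, 0xabcd, 4, &fEFlags);
 *     // u16Dst == 0x234a: (0x1234 << 4) | (0xabcd >> 12)
 */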
568
569
570/** @name Bit search operations (thrown in with the binary ops).
571 * @{ */
572FNIEMAIMPLBINU16 iemAImpl_bsf_u16, iemAImpl_bsf_u16_amd, iemAImpl_bsf_u16_intel;
573FNIEMAIMPLBINU32 iemAImpl_bsf_u32, iemAImpl_bsf_u32_amd, iemAImpl_bsf_u32_intel;
574FNIEMAIMPLBINU64 iemAImpl_bsf_u64, iemAImpl_bsf_u64_amd, iemAImpl_bsf_u64_intel;
575FNIEMAIMPLBINU16 iemAImpl_bsr_u16, iemAImpl_bsr_u16_amd, iemAImpl_bsr_u16_intel;
576FNIEMAIMPLBINU32 iemAImpl_bsr_u32, iemAImpl_bsr_u32_amd, iemAImpl_bsr_u32_intel;
577FNIEMAIMPLBINU64 iemAImpl_bsr_u64, iemAImpl_bsr_u64_amd, iemAImpl_bsr_u64_intel;
578FNIEMAIMPLBINU16 iemAImpl_lzcnt_u16, iemAImpl_lzcnt_u16_amd, iemAImpl_lzcnt_u16_intel;
579FNIEMAIMPLBINU32 iemAImpl_lzcnt_u32, iemAImpl_lzcnt_u32_amd, iemAImpl_lzcnt_u32_intel;
580FNIEMAIMPLBINU64 iemAImpl_lzcnt_u64, iemAImpl_lzcnt_u64_amd, iemAImpl_lzcnt_u64_intel;
581FNIEMAIMPLBINU16 iemAImpl_tzcnt_u16, iemAImpl_tzcnt_u16_amd, iemAImpl_tzcnt_u16_intel;
582FNIEMAIMPLBINU32 iemAImpl_tzcnt_u32, iemAImpl_tzcnt_u32_amd, iemAImpl_tzcnt_u32_intel;
583FNIEMAIMPLBINU64 iemAImpl_tzcnt_u64, iemAImpl_tzcnt_u64_amd, iemAImpl_tzcnt_u64_intel;
584FNIEMAIMPLBINU16 iemAImpl_popcnt_u16, iemAImpl_popcnt_u16_fallback;
585FNIEMAIMPLBINU32 iemAImpl_popcnt_u32, iemAImpl_popcnt_u32_fallback;
586FNIEMAIMPLBINU64 iemAImpl_popcnt_u64, iemAImpl_popcnt_u64_fallback;
587/** @} */
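
/* Illustrative sketch (editorial addition, not part of the original header):
 * these scan/count helpers reuse the FNIEMAIMPLBINUxx shape, so the result
 * lands in the destination and the return value is the updated EFLAGS.  Flag
 * details follow the respective instruction specifications (for this input
 * TZCNT leaves CF and ZF clear).
 *
 *     uint16_t u16Dst  = 0;
 *     uint32_t fEFlags = 0;
 *     fEFlags = iemAImpl_tzcnt_u16(fEFlags, &u16Dst, 0x0008); // u16Dst == 3
 */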
588
589/** @name Signed multiplication operations (thrown in with the binary ops).
590 * @{ */
591FNIEMAIMPLBINU16 iemAImpl_imul_two_u16, iemAImpl_imul_two_u16_amd, iemAImpl_imul_two_u16_intel;
592FNIEMAIMPLBINU32 iemAImpl_imul_two_u32, iemAImpl_imul_two_u32_amd, iemAImpl_imul_two_u32_intel;
593FNIEMAIMPLBINU64 iemAImpl_imul_two_u64, iemAImpl_imul_two_u64_amd, iemAImpl_imul_two_u64_intel;
594/** @} */
595
596/** @name Arithmetic assignment operations on bytes (unary).
597 * @{ */
598typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU8, (uint8_t *pu8Dst, uint32_t *pEFlags));
599typedef FNIEMAIMPLUNARYU8 *PFNIEMAIMPLUNARYU8;
600FNIEMAIMPLUNARYU8 iemAImpl_inc_u8, iemAImpl_inc_u8_locked;
601FNIEMAIMPLUNARYU8 iemAImpl_dec_u8, iemAImpl_dec_u8_locked;
602FNIEMAIMPLUNARYU8 iemAImpl_not_u8, iemAImpl_not_u8_locked;
603FNIEMAIMPLUNARYU8 iemAImpl_neg_u8, iemAImpl_neg_u8_locked;
604/** @} */
605
606/** @name Arithmetic assignment operations on words (unary).
607 * @{ */
608typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU16, (uint16_t *pu16Dst, uint32_t *pEFlags));
609typedef FNIEMAIMPLUNARYU16 *PFNIEMAIMPLUNARYU16;
610FNIEMAIMPLUNARYU16 iemAImpl_inc_u16, iemAImpl_inc_u16_locked;
611FNIEMAIMPLUNARYU16 iemAImpl_dec_u16, iemAImpl_dec_u16_locked;
612FNIEMAIMPLUNARYU16 iemAImpl_not_u16, iemAImpl_not_u16_locked;
613FNIEMAIMPLUNARYU16 iemAImpl_neg_u16, iemAImpl_neg_u16_locked;
614/** @} */
615
616/** @name Arithmetic assignment operations on double words (unary).
617 * @{ */
618typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU32, (uint32_t *pu32Dst, uint32_t *pEFlags));
619typedef FNIEMAIMPLUNARYU32 *PFNIEMAIMPLUNARYU32;
620FNIEMAIMPLUNARYU32 iemAImpl_inc_u32, iemAImpl_inc_u32_locked;
621FNIEMAIMPLUNARYU32 iemAImpl_dec_u32, iemAImpl_dec_u32_locked;
622FNIEMAIMPLUNARYU32 iemAImpl_not_u32, iemAImpl_not_u32_locked;
623FNIEMAIMPLUNARYU32 iemAImpl_neg_u32, iemAImpl_neg_u32_locked;
624/** @} */
625
626/** @name Arithmetic assignment operations on quad words (unary).
627 * @{ */
628typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU64, (uint64_t *pu64Dst, uint32_t *pEFlags));
629typedef FNIEMAIMPLUNARYU64 *PFNIEMAIMPLUNARYU64;
630FNIEMAIMPLUNARYU64 iemAImpl_inc_u64, iemAImpl_inc_u64_locked;
631FNIEMAIMPLUNARYU64 iemAImpl_dec_u64, iemAImpl_dec_u64_locked;
632FNIEMAIMPLUNARYU64 iemAImpl_not_u64, iemAImpl_not_u64_locked;
633FNIEMAIMPLUNARYU64 iemAImpl_neg_u64, iemAImpl_neg_u64_locked;
634/** @} */
635
636
637/** @name Shift operations on bytes (Group 2).
638 * @{ */
639typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU8,(uint32_t fEFlagsIn, uint8_t *pu8Dst, uint8_t cShift));
640typedef FNIEMAIMPLSHIFTU8 *PFNIEMAIMPLSHIFTU8;
641FNIEMAIMPLSHIFTU8 iemAImpl_rol_u8, iemAImpl_rol_u8_amd, iemAImpl_rol_u8_intel;
642FNIEMAIMPLSHIFTU8 iemAImpl_ror_u8, iemAImpl_ror_u8_amd, iemAImpl_ror_u8_intel;
643FNIEMAIMPLSHIFTU8 iemAImpl_rcl_u8, iemAImpl_rcl_u8_amd, iemAImpl_rcl_u8_intel;
644FNIEMAIMPLSHIFTU8 iemAImpl_rcr_u8, iemAImpl_rcr_u8_amd, iemAImpl_rcr_u8_intel;
645FNIEMAIMPLSHIFTU8 iemAImpl_shl_u8, iemAImpl_shl_u8_amd, iemAImpl_shl_u8_intel;
646FNIEMAIMPLSHIFTU8 iemAImpl_shr_u8, iemAImpl_shr_u8_amd, iemAImpl_shr_u8_intel;
647FNIEMAIMPLSHIFTU8 iemAImpl_sar_u8, iemAImpl_sar_u8_amd, iemAImpl_sar_u8_intel;
648/** @} */
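
/* Illustrative sketch (editorial addition, not part of the original header):
 * the Group 2 shift/rotate helpers follow the same EFLAGS-in/EFLAGS-out
 * convention, with the shift/rotate count as the last argument.
 *
 *     uint8_t  u8Dst   = 0x81;
 *     uint32_t fEFlags = 0;
 *     fEFlags = iemAImpl_rol_u8(fEFlags, &u8Dst, 1); // u8Dst == 0x03, CF set
 */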
649
650/** @name Shift operations on words (Group 2).
651 * @{ */
652typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU16,(uint32_t fEFlagsIn, uint16_t *pu16Dst, uint8_t cShift));
653typedef FNIEMAIMPLSHIFTU16 *PFNIEMAIMPLSHIFTU16;
654FNIEMAIMPLSHIFTU16 iemAImpl_rol_u16, iemAImpl_rol_u16_amd, iemAImpl_rol_u16_intel;
655FNIEMAIMPLSHIFTU16 iemAImpl_ror_u16, iemAImpl_ror_u16_amd, iemAImpl_ror_u16_intel;
656FNIEMAIMPLSHIFTU16 iemAImpl_rcl_u16, iemAImpl_rcl_u16_amd, iemAImpl_rcl_u16_intel;
657FNIEMAIMPLSHIFTU16 iemAImpl_rcr_u16, iemAImpl_rcr_u16_amd, iemAImpl_rcr_u16_intel;
658FNIEMAIMPLSHIFTU16 iemAImpl_shl_u16, iemAImpl_shl_u16_amd, iemAImpl_shl_u16_intel;
659FNIEMAIMPLSHIFTU16 iemAImpl_shr_u16, iemAImpl_shr_u16_amd, iemAImpl_shr_u16_intel;
660FNIEMAIMPLSHIFTU16 iemAImpl_sar_u16, iemAImpl_sar_u16_amd, iemAImpl_sar_u16_intel;
661/** @} */
662
663/** @name Shift operations on double words (Group 2).
664 * @{ */
665typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU32,(uint32_t fEFlagsIn, uint32_t *pu32Dst, uint8_t cShift));
666typedef FNIEMAIMPLSHIFTU32 *PFNIEMAIMPLSHIFTU32;
667FNIEMAIMPLSHIFTU32 iemAImpl_rol_u32, iemAImpl_rol_u32_amd, iemAImpl_rol_u32_intel;
668FNIEMAIMPLSHIFTU32 iemAImpl_ror_u32, iemAImpl_ror_u32_amd, iemAImpl_ror_u32_intel;
669FNIEMAIMPLSHIFTU32 iemAImpl_rcl_u32, iemAImpl_rcl_u32_amd, iemAImpl_rcl_u32_intel;
670FNIEMAIMPLSHIFTU32 iemAImpl_rcr_u32, iemAImpl_rcr_u32_amd, iemAImpl_rcr_u32_intel;
671FNIEMAIMPLSHIFTU32 iemAImpl_shl_u32, iemAImpl_shl_u32_amd, iemAImpl_shl_u32_intel;
672FNIEMAIMPLSHIFTU32 iemAImpl_shr_u32, iemAImpl_shr_u32_amd, iemAImpl_shr_u32_intel;
673FNIEMAIMPLSHIFTU32 iemAImpl_sar_u32, iemAImpl_sar_u32_amd, iemAImpl_sar_u32_intel;
674/** @} */
675
676/** @name Shift operations on quad words (Group 2).
677 * @{ */
678typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU64,(uint32_t fEFlagsIn, uint64_t *pu64Dst, uint8_t cShift));
679typedef FNIEMAIMPLSHIFTU64 *PFNIEMAIMPLSHIFTU64;
680FNIEMAIMPLSHIFTU64 iemAImpl_rol_u64, iemAImpl_rol_u64_amd, iemAImpl_rol_u64_intel;
681FNIEMAIMPLSHIFTU64 iemAImpl_ror_u64, iemAImpl_ror_u64_amd, iemAImpl_ror_u64_intel;
682FNIEMAIMPLSHIFTU64 iemAImpl_rcl_u64, iemAImpl_rcl_u64_amd, iemAImpl_rcl_u64_intel;
683FNIEMAIMPLSHIFTU64 iemAImpl_rcr_u64, iemAImpl_rcr_u64_amd, iemAImpl_rcr_u64_intel;
684FNIEMAIMPLSHIFTU64 iemAImpl_shl_u64, iemAImpl_shl_u64_amd, iemAImpl_shl_u64_intel;
685FNIEMAIMPLSHIFTU64 iemAImpl_shr_u64, iemAImpl_shr_u64_amd, iemAImpl_shr_u64_intel;
686FNIEMAIMPLSHIFTU64 iemAImpl_sar_u64, iemAImpl_sar_u64_amd, iemAImpl_sar_u64_intel;
687/** @} */
688
689/** @name Multiplication and division operations.
690 * @{ */
691typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU8,(uint16_t *pu16AX, uint8_t u8FactorDivisor, uint32_t fEFlags));
692typedef FNIEMAIMPLMULDIVU8 *PFNIEMAIMPLMULDIVU8;
693FNIEMAIMPLMULDIVU8 iemAImpl_mul_u8, iemAImpl_mul_u8_amd, iemAImpl_mul_u8_intel;
694FNIEMAIMPLMULDIVU8 iemAImpl_imul_u8, iemAImpl_imul_u8_amd, iemAImpl_imul_u8_intel;
695FNIEMAIMPLMULDIVU8 iemAImpl_div_u8, iemAImpl_div_u8_amd, iemAImpl_div_u8_intel;
696FNIEMAIMPLMULDIVU8 iemAImpl_idiv_u8, iemAImpl_idiv_u8_amd, iemAImpl_idiv_u8_intel;
697
698typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU16,(uint16_t *pu16AX, uint16_t *pu16DX, uint16_t u16FactorDivisor, uint32_t fEFlags));
699typedef FNIEMAIMPLMULDIVU16 *PFNIEMAIMPLMULDIVU16;
700FNIEMAIMPLMULDIVU16 iemAImpl_mul_u16, iemAImpl_mul_u16_amd, iemAImpl_mul_u16_intel;
701FNIEMAIMPLMULDIVU16 iemAImpl_imul_u16, iemAImpl_imul_u16_amd, iemAImpl_imul_u16_intel;
702FNIEMAIMPLMULDIVU16 iemAImpl_div_u16, iemAImpl_div_u16_amd, iemAImpl_div_u16_intel;
703FNIEMAIMPLMULDIVU16 iemAImpl_idiv_u16, iemAImpl_idiv_u16_amd, iemAImpl_idiv_u16_intel;
704
705typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU32,(uint32_t *pu32EAX, uint32_t *pu32EDX, uint32_t u32FactorDivisor, uint32_t fEFlags));
706typedef FNIEMAIMPLMULDIVU32 *PFNIEMAIMPLMULDIVU32;
707FNIEMAIMPLMULDIVU32 iemAImpl_mul_u32, iemAImpl_mul_u32_amd, iemAImpl_mul_u32_intel;
708FNIEMAIMPLMULDIVU32 iemAImpl_imul_u32, iemAImpl_imul_u32_amd, iemAImpl_imul_u32_intel;
709FNIEMAIMPLMULDIVU32 iemAImpl_div_u32, iemAImpl_div_u32_amd, iemAImpl_div_u32_intel;
710FNIEMAIMPLMULDIVU32 iemAImpl_idiv_u32, iemAImpl_idiv_u32_amd, iemAImpl_idiv_u32_intel;
711
712typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64FactorDivisor, uint32_t fEFlags));
713typedef FNIEMAIMPLMULDIVU64 *PFNIEMAIMPLMULDIVU64;
714FNIEMAIMPLMULDIVU64 iemAImpl_mul_u64, iemAImpl_mul_u64_amd, iemAImpl_mul_u64_intel;
715FNIEMAIMPLMULDIVU64 iemAImpl_imul_u64, iemAImpl_imul_u64_amd, iemAImpl_imul_u64_intel;
716FNIEMAIMPLMULDIVU64 iemAImpl_div_u64, iemAImpl_div_u64_amd, iemAImpl_div_u64_intel;
717FNIEMAIMPLMULDIVU64 iemAImpl_idiv_u64, iemAImpl_idiv_u64_amd, iemAImpl_idiv_u64_intel;
718/** @} */
719
720/** @name Byte Swap.
721 * @{ */
722IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u16,(uint32_t *pu32Dst)); /* Yes, 32-bit register access. */
723IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u32,(uint32_t *pu32Dst));
724IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u64,(uint64_t *pu64Dst));
725/** @} */
726
727/** @name Misc.
728 * @{ */
729FNIEMAIMPLBINU16 iemAImpl_arpl;
730/** @} */
731
732/** @name RDRAND and RDSEED
733 * @{ */
734typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU16,(uint16_t *puDst, uint32_t *pEFlags));
735typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU32,(uint32_t *puDst, uint32_t *pEFlags));
736typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU64,(uint64_t *puDst, uint32_t *pEFlags));
737typedef FNIEMAIMPLRDRANDSEEDU16 *PFNIEMAIMPLRDRANDSEEDU16;
738typedef FNIEMAIMPLRDRANDSEEDU32 *PFNIEMAIMPLRDRANDSEEDU32;
739typedef FNIEMAIMPLRDRANDSEEDU64 *PFNIEMAIMPLRDRANDSEEDU64;
740
741FNIEMAIMPLRDRANDSEEDU16 iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback;
742FNIEMAIMPLRDRANDSEEDU32 iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback;
743FNIEMAIMPLRDRANDSEEDU64 iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback;
744FNIEMAIMPLRDRANDSEEDU16 iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback;
745FNIEMAIMPLRDRANDSEEDU32 iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback;
746FNIEMAIMPLRDRANDSEEDU64 iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback;
747/** @} */
748
749/** @name ADOX and ADCX
750 * @{ */
751FNIEMAIMPLBINU32 iemAImpl_adcx_u32, iemAImpl_adcx_u32_fallback;
752FNIEMAIMPLBINU64 iemAImpl_adcx_u64, iemAImpl_adcx_u64_fallback;
753FNIEMAIMPLBINU32 iemAImpl_adox_u32, iemAImpl_adox_u32_fallback;
754FNIEMAIMPLBINU64 iemAImpl_adox_u64, iemAImpl_adox_u64_fallback;
755/** @} */
756
757
758/**
759 * A FPU result.
760 * @note x86 specific
761 */
762typedef struct IEMFPURESULT
763{
764 /** The output value. */
765 RTFLOAT80U r80Result;
766 /** The output status. */
767 uint16_t FSW;
768} IEMFPURESULT;
769AssertCompileMemberOffset(IEMFPURESULT, FSW, 10);
770/** Pointer to a FPU result. */
771typedef IEMFPURESULT *PIEMFPURESULT;
772/** Pointer to a const FPU result. */
773typedef IEMFPURESULT const *PCIEMFPURESULT;
774
775/** @name FPU operations taking a 32-bit float argument
776 * @{ */
777typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR32FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
778 PCRTFLOAT80U pr80Val1, PCRTFLOAT32U pr32Val2));
779typedef FNIEMAIMPLFPUR32FSW *PFNIEMAIMPLFPUR32FSW;
780
781typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
782 PCRTFLOAT80U pr80Val1, PCRTFLOAT32U pr32Val2));
783typedef FNIEMAIMPLFPUR32 *PFNIEMAIMPLFPUR32;
784
785FNIEMAIMPLFPUR32FSW iemAImpl_fcom_r80_by_r32;
786FNIEMAIMPLFPUR32 iemAImpl_fadd_r80_by_r32;
787FNIEMAIMPLFPUR32 iemAImpl_fmul_r80_by_r32;
788FNIEMAIMPLFPUR32 iemAImpl_fsub_r80_by_r32;
789FNIEMAIMPLFPUR32 iemAImpl_fsubr_r80_by_r32;
790FNIEMAIMPLFPUR32 iemAImpl_fdiv_r80_by_r32;
791FNIEMAIMPLFPUR32 iemAImpl_fdivr_r80_by_r32;
792
793IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT32U pr32Val));
794IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r32,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
795 PRTFLOAT32U pr32Val, PCRTFLOAT80U pr80Val));
796/** @} */
797
798/** @name FPU operations taking a 64-bit float argument
799 * @{ */
800typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR64FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
801 PCRTFLOAT80U pr80Val1, PCRTFLOAT64U pr64Val2));
802typedef FNIEMAIMPLFPUR64FSW *PFNIEMAIMPLFPUR64FSW;
803
804typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
805 PCRTFLOAT80U pr80Val1, PCRTFLOAT64U pr64Val2));
806typedef FNIEMAIMPLFPUR64 *PFNIEMAIMPLFPUR64;
807
808FNIEMAIMPLFPUR64FSW iemAImpl_fcom_r80_by_r64;
809FNIEMAIMPLFPUR64 iemAImpl_fadd_r80_by_r64;
810FNIEMAIMPLFPUR64 iemAImpl_fmul_r80_by_r64;
811FNIEMAIMPLFPUR64 iemAImpl_fsub_r80_by_r64;
812FNIEMAIMPLFPUR64 iemAImpl_fsubr_r80_by_r64;
813FNIEMAIMPLFPUR64 iemAImpl_fdiv_r80_by_r64;
814FNIEMAIMPLFPUR64 iemAImpl_fdivr_r80_by_r64;
815
816IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT64U pr64Val));
817IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r64,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
818 PRTFLOAT64U pr64Val, PCRTFLOAT80U pr80Val));
819/** @} */
820
821/** @name FPU operations taking an 80-bit float argument
822 * @{ */
823typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
824 PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
825typedef FNIEMAIMPLFPUR80 *PFNIEMAIMPLFPUR80;
826FNIEMAIMPLFPUR80 iemAImpl_fadd_r80_by_r80;
827FNIEMAIMPLFPUR80 iemAImpl_fmul_r80_by_r80;
828FNIEMAIMPLFPUR80 iemAImpl_fsub_r80_by_r80;
829FNIEMAIMPLFPUR80 iemAImpl_fsubr_r80_by_r80;
830FNIEMAIMPLFPUR80 iemAImpl_fdiv_r80_by_r80;
831FNIEMAIMPLFPUR80 iemAImpl_fdivr_r80_by_r80;
832FNIEMAIMPLFPUR80 iemAImpl_fprem_r80_by_r80;
833FNIEMAIMPLFPUR80 iemAImpl_fprem1_r80_by_r80;
834FNIEMAIMPLFPUR80 iemAImpl_fscale_r80_by_r80;
835
836FNIEMAIMPLFPUR80 iemAImpl_fpatan_r80_by_r80, iemAImpl_fpatan_r80_by_r80_amd, iemAImpl_fpatan_r80_by_r80_intel;
837FNIEMAIMPLFPUR80 iemAImpl_fyl2x_r80_by_r80, iemAImpl_fyl2x_r80_by_r80_amd, iemAImpl_fyl2x_r80_by_r80_intel;
838FNIEMAIMPLFPUR80 iemAImpl_fyl2xp1_r80_by_r80, iemAImpl_fyl2xp1_r80_by_r80_amd, iemAImpl_fyl2xp1_r80_by_r80_intel;
839
840typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
841 PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
842typedef FNIEMAIMPLFPUR80FSW *PFNIEMAIMPLFPUR80FSW;
843FNIEMAIMPLFPUR80FSW iemAImpl_fcom_r80_by_r80;
844FNIEMAIMPLFPUR80FSW iemAImpl_fucom_r80_by_r80;
845
846typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPUR80EFL,(PCX86FXSTATE pFpuState, uint16_t *pu16Fsw,
847 PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
848typedef FNIEMAIMPLFPUR80EFL *PFNIEMAIMPLFPUR80EFL;
849FNIEMAIMPLFPUR80EFL iemAImpl_fcomi_r80_by_r80;
850FNIEMAIMPLFPUR80EFL iemAImpl_fucomi_r80_by_r80;
851
852typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARY,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT80U pr80Val));
853typedef FNIEMAIMPLFPUR80UNARY *PFNIEMAIMPLFPUR80UNARY;
854FNIEMAIMPLFPUR80UNARY iemAImpl_fabs_r80;
855FNIEMAIMPLFPUR80UNARY iemAImpl_fchs_r80;
856FNIEMAIMPLFPUR80UNARY iemAImpl_f2xm1_r80, iemAImpl_f2xm1_r80_amd, iemAImpl_f2xm1_r80_intel;
857FNIEMAIMPLFPUR80UNARY iemAImpl_fsqrt_r80;
858FNIEMAIMPLFPUR80UNARY iemAImpl_frndint_r80;
859FNIEMAIMPLFPUR80UNARY iemAImpl_fsin_r80, iemAImpl_fsin_r80_amd, iemAImpl_fsin_r80_intel;
860FNIEMAIMPLFPUR80UNARY iemAImpl_fcos_r80, iemAImpl_fcos_r80_amd, iemAImpl_fcos_r80_intel;
861
862typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARYFSW,(PCX86FXSTATE pFpuState, uint16_t *pu16Fsw, PCRTFLOAT80U pr80Val));
863typedef FNIEMAIMPLFPUR80UNARYFSW *PFNIEMAIMPLFPUR80UNARYFSW;
864FNIEMAIMPLFPUR80UNARYFSW iemAImpl_ftst_r80;
865FNIEMAIMPLFPUR80UNARYFSW iemAImpl_fxam_r80;
866
867typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80LDCONST,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes));
868typedef FNIEMAIMPLFPUR80LDCONST *PFNIEMAIMPLFPUR80LDCONST;
869FNIEMAIMPLFPUR80LDCONST iemAImpl_fld1;
870FNIEMAIMPLFPUR80LDCONST iemAImpl_fldl2t;
871FNIEMAIMPLFPUR80LDCONST iemAImpl_fldl2e;
872FNIEMAIMPLFPUR80LDCONST iemAImpl_fldpi;
873FNIEMAIMPLFPUR80LDCONST iemAImpl_fldlg2;
874FNIEMAIMPLFPUR80LDCONST iemAImpl_fldln2;
875FNIEMAIMPLFPUR80LDCONST iemAImpl_fldz;
876
877/**
878 * A FPU result consisting of two output values and FSW.
879 * @note x86 specific
880 */
881typedef struct IEMFPURESULTTWO
882{
883 /** The first output value. */
884 RTFLOAT80U r80Result1;
885 /** The output status. */
886 uint16_t FSW;
887 /** The second output value. */
888 RTFLOAT80U r80Result2;
889} IEMFPURESULTTWO;
890AssertCompileMemberOffset(IEMFPURESULTTWO, FSW, 10);
891AssertCompileMemberOffset(IEMFPURESULTTWO, r80Result2, 12);
892/** Pointer to a FPU result consisting of two output values and FSW. */
893typedef IEMFPURESULTTWO *PIEMFPURESULTTWO;
894/** Pointer to a const FPU result consisting of two output values and FSW. */
895typedef IEMFPURESULTTWO const *PCIEMFPURESULTTWO;
896
897typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARYTWO,(PCX86FXSTATE pFpuState, PIEMFPURESULTTWO pFpuResTwo,
898 PCRTFLOAT80U pr80Val));
899typedef FNIEMAIMPLFPUR80UNARYTWO *PFNIEMAIMPLFPUR80UNARYTWO;
900FNIEMAIMPLFPUR80UNARYTWO iemAImpl_fptan_r80_r80, iemAImpl_fptan_r80_r80_amd, iemAImpl_fptan_r80_r80_intel;
901FNIEMAIMPLFPUR80UNARYTWO iemAImpl_fxtract_r80_r80;
902FNIEMAIMPLFPUR80UNARYTWO iemAImpl_fsincos_r80_r80, iemAImpl_fsincos_r80_r80_amd, iemAImpl_fsincos_r80_r80_intel;
903
904IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT80U pr80Val));
905IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r80,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
906 PRTFLOAT80U pr80Dst, PCRTFLOAT80U pr80Src));
907
908IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_d80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTPBCD80U pd80Val));
909IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_d80,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
910 PRTPBCD80U pd80Dst, PCRTFLOAT80U pr80Src));
911
912/** @} */
913
914/** @name FPU operations taking a 16-bit signed integer argument
915 * @{ */
916typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI16,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
917 PCRTFLOAT80U pr80Val1, int16_t const *pi16Val2));
918typedef FNIEMAIMPLFPUI16 *PFNIEMAIMPLFPUI16;
919typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI16,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
920 int16_t *pi16Dst, PCRTFLOAT80U pr80Src));
921typedef FNIEMAIMPLFPUSTR80TOI16 *PFNIEMAIMPLFPUSTR80TOI16;
922
923FNIEMAIMPLFPUI16 iemAImpl_fiadd_r80_by_i16;
924FNIEMAIMPLFPUI16 iemAImpl_fimul_r80_by_i16;
925FNIEMAIMPLFPUI16 iemAImpl_fisub_r80_by_i16;
926FNIEMAIMPLFPUI16 iemAImpl_fisubr_r80_by_i16;
927FNIEMAIMPLFPUI16 iemAImpl_fidiv_r80_by_i16;
928FNIEMAIMPLFPUI16 iemAImpl_fidivr_r80_by_i16;
929
930typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI16FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
931 PCRTFLOAT80U pr80Val1, int16_t const *pi16Val2));
932typedef FNIEMAIMPLFPUI16FSW *PFNIEMAIMPLFPUI16FSW;
933FNIEMAIMPLFPUI16FSW iemAImpl_ficom_r80_by_i16;
934
935IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i16,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int16_t const *pi16Val));
936FNIEMAIMPLFPUSTR80TOI16 iemAImpl_fist_r80_to_i16;
937FNIEMAIMPLFPUSTR80TOI16 iemAImpl_fistt_r80_to_i16, iemAImpl_fistt_r80_to_i16_amd, iemAImpl_fistt_r80_to_i16_intel;
938/** @} */
939
940/** @name FPU operations taking a 32-bit signed integer argument
941 * @{ */
942typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
943 PCRTFLOAT80U pr80Val1, int32_t const *pi32Val2));
944typedef FNIEMAIMPLFPUI32 *PFNIEMAIMPLFPUI32;
945typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI32,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
946 int32_t *pi32Dst, PCRTFLOAT80U pr80Src));
947typedef FNIEMAIMPLFPUSTR80TOI32 *PFNIEMAIMPLFPUSTR80TOI32;
948
949FNIEMAIMPLFPUI32 iemAImpl_fiadd_r80_by_i32;
950FNIEMAIMPLFPUI32 iemAImpl_fimul_r80_by_i32;
951FNIEMAIMPLFPUI32 iemAImpl_fisub_r80_by_i32;
952FNIEMAIMPLFPUI32 iemAImpl_fisubr_r80_by_i32;
953FNIEMAIMPLFPUI32 iemAImpl_fidiv_r80_by_i32;
954FNIEMAIMPLFPUI32 iemAImpl_fidivr_r80_by_i32;
955
956typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI32FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
957 PCRTFLOAT80U pr80Val1, int32_t const *pi32Val2));
958typedef FNIEMAIMPLFPUI32FSW *PFNIEMAIMPLFPUI32FSW;
959FNIEMAIMPLFPUI32FSW iemAImpl_ficom_r80_by_i32;
960
961IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int32_t const *pi32Val));
962FNIEMAIMPLFPUSTR80TOI32 iemAImpl_fist_r80_to_i32;
963FNIEMAIMPLFPUSTR80TOI32 iemAImpl_fistt_r80_to_i32;
964/** @} */
965
966/** @name FPU operations taking a 64-bit signed integer argument
967 * @{ */
968typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI64,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
969 int64_t *pi64Dst, PCRTFLOAT80U pr80Src));
970typedef FNIEMAIMPLFPUSTR80TOI64 *PFNIEMAIMPLFPUSTR80TOI64;
971
972IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int64_t const *pi64Val));
973FNIEMAIMPLFPUSTR80TOI64 iemAImpl_fist_r80_to_i64;
974FNIEMAIMPLFPUSTR80TOI64 iemAImpl_fistt_r80_to_i64;
975/** @} */
976
977
978/** Temporary type representing a 256-bit vector register. */
979typedef struct { uint64_t au64[4]; } IEMVMM256;
980/** Temporary type pointing to a 256-bit vector register. */
981typedef IEMVMM256 *PIEMVMM256;
982/** Temporary type pointing to a const 256-bit vector register. */
983typedef IEMVMM256 const *PCIEMVMM256;
984
985
986/** @name Media (SSE/MMX/AVX) operations: full1 + full2 -> full1.
987 * @{ */
988typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAF2U64,(PCX86FXSTATE pFpuState, uint64_t *puDst, uint64_t const *puSrc));
989typedef FNIEMAIMPLMEDIAF2U64 *PFNIEMAIMPLMEDIAF2U64;
990typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
991typedef FNIEMAIMPLMEDIAF2U128 *PFNIEMAIMPLMEDIAF2U128;
992typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U256,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc));
993typedef FNIEMAIMPLMEDIAF2U256 *PFNIEMAIMPLMEDIAF2U256;
994typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3U128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
995typedef FNIEMAIMPLMEDIAF3U128 *PFNIEMAIMPLMEDIAF3U128;
996typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3U256,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc1, PCX86YMMREG puSrc2));
997typedef FNIEMAIMPLMEDIAF3U256 *PFNIEMAIMPLMEDIAF3U256;
998typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U64,(uint64_t *puDst, uint64_t const *puSrc));
999typedef FNIEMAIMPLMEDIAOPTF2U64 *PFNIEMAIMPLMEDIAOPTF2U64;
1000typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U128,(PRTUINT128U puDst, PCRTUINT128U puSrc));
1001typedef FNIEMAIMPLMEDIAOPTF2U128 *PFNIEMAIMPLMEDIAOPTF2U128;
1002typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2));
1003typedef FNIEMAIMPLMEDIAOPTF3U128 *PFNIEMAIMPLMEDIAOPTF3U128;
1004typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2));
1005typedef FNIEMAIMPLMEDIAOPTF3U256 *PFNIEMAIMPLMEDIAOPTF3U256;
1006typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U256,(PRTUINT256U puDst, PCRTUINT256U puSrc));
1007typedef FNIEMAIMPLMEDIAOPTF2U256 *PFNIEMAIMPLMEDIAOPTF2U256;
1008FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pshufb_u64, iemAImpl_pshufb_u64_fallback;
1009FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pand_u64, iemAImpl_pandn_u64, iemAImpl_por_u64, iemAImpl_pxor_u64;
1010FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pcmpeqb_u64, iemAImpl_pcmpeqw_u64, iemAImpl_pcmpeqd_u64;
1011FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pcmpgtb_u64, iemAImpl_pcmpgtw_u64, iemAImpl_pcmpgtd_u64;
1012FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddb_u64, iemAImpl_paddsb_u64, iemAImpl_paddusb_u64;
1013FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddw_u64, iemAImpl_paddsw_u64, iemAImpl_paddusw_u64;
1014FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddd_u64;
1015FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddq_u64;
1016FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubb_u64, iemAImpl_psubsb_u64, iemAImpl_psubusb_u64;
1017FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubw_u64, iemAImpl_psubsw_u64, iemAImpl_psubusw_u64;
1018FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubd_u64;
1019FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubq_u64;
1020FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmaddwd_u64, iemAImpl_pmaddwd_u64_fallback;
1021FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmullw_u64, iemAImpl_pmulhw_u64;
1022FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pminub_u64, iemAImpl_pmaxub_u64;
1023FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pminsw_u64, iemAImpl_pmaxsw_u64;
1024FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pabsb_u64, iemAImpl_pabsb_u64_fallback;
1025FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pabsw_u64, iemAImpl_pabsw_u64_fallback;
1026FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pabsd_u64, iemAImpl_pabsd_u64_fallback;
1027FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psignb_u64, iemAImpl_psignb_u64_fallback;
1028FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psignw_u64, iemAImpl_psignw_u64_fallback;
1029FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psignd_u64, iemAImpl_psignd_u64_fallback;
1030FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phaddw_u64, iemAImpl_phaddw_u64_fallback;
1031FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phaddd_u64, iemAImpl_phaddd_u64_fallback;
1032FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phsubw_u64, iemAImpl_phsubw_u64_fallback;
1033FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phsubd_u64, iemAImpl_phsubd_u64_fallback;
1034FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phaddsw_u64, iemAImpl_phaddsw_u64_fallback;
1035FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phsubsw_u64, iemAImpl_phsubsw_u64_fallback;
1036FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmaddubsw_u64, iemAImpl_pmaddubsw_u64_fallback;
1037FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmulhrsw_u64, iemAImpl_pmulhrsw_u64_fallback;
1038FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmuludq_u64;
1039FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psllw_u64, iemAImpl_psrlw_u64, iemAImpl_psraw_u64;
1040FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pslld_u64, iemAImpl_psrld_u64, iemAImpl_psrad_u64;
1041FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psllq_u64, iemAImpl_psrlq_u64;
1042FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_packsswb_u64, iemAImpl_packuswb_u64;
1043FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_packssdw_u64;
1044FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmulhuw_u64;
1045FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pavgb_u64, iemAImpl_pavgw_u64;
1046FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psadbw_u64;
1047
1048FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback;
1049FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pand_u128, iemAImpl_pandn_u128, iemAImpl_por_u128, iemAImpl_pxor_u128;
1050FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpeqb_u128, iemAImpl_pcmpeqw_u128, iemAImpl_pcmpeqd_u128;
1051FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback;
1052FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpgtb_u128, iemAImpl_pcmpgtw_u128, iemAImpl_pcmpgtd_u128;
1053FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback;
1054FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddb_u128, iemAImpl_paddsb_u128, iemAImpl_paddusb_u128;
1055FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddw_u128, iemAImpl_paddsw_u128, iemAImpl_paddusw_u128;
1056FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddd_u128;
1057FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddq_u128;
1058FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubb_u128, iemAImpl_psubsb_u128, iemAImpl_psubusb_u128;
1059FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubw_u128, iemAImpl_psubsw_u128, iemAImpl_psubusw_u128;
1060FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubd_u128;
1061FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubq_u128;
1062FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmullw_u128, iemAImpl_pmullw_u128_fallback;
1063FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhw_u128;
1064FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback;
1065FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaddwd_u128, iemAImpl_pmaddwd_u128_fallback;
1066FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminub_u128;
1067FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback;
1068FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback;
1069FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback;
1070FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback;
1071FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsw_u128, iemAImpl_pminsw_u128_fallback;
1072FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxub_u128;
1073FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback;
1074FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback;
1075FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback;
1076FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsw_u128;
1077FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback;
1078FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback;
1079FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback;
1080FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback;
1081FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback;
1082FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback;
1083FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback;
1084FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddw_u128, iemAImpl_phaddw_u128_fallback;
1085FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddd_u128, iemAImpl_phaddd_u128_fallback;
1086FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubw_u128, iemAImpl_phsubw_u128_fallback;
1087FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubd_u128, iemAImpl_phsubd_u128_fallback;
1088FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddsw_u128, iemAImpl_phaddsw_u128_fallback;
1089FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubsw_u128, iemAImpl_phsubsw_u128_fallback;
1090FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaddubsw_u128, iemAImpl_pmaddubsw_u128_fallback;
1091FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhrsw_u128, iemAImpl_pmulhrsw_u128_fallback;
1092FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmuludq_u128;
1094FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_packsswb_u128, iemAImpl_packuswb_u128;
1095FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_packssdw_u128, iemAImpl_packusdw_u128;
1096FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psllw_u128, iemAImpl_psrlw_u128, iemAImpl_psraw_u128;
1097FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pslld_u128, iemAImpl_psrld_u128, iemAImpl_psrad_u128;
1098FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psllq_u128, iemAImpl_psrlq_u128;
1099FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhuw_u128;
1100FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pavgb_u128, iemAImpl_pavgw_u128;
1101FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psadbw_u128;
1102FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmuldq_u128, iemAImpl_pmuldq_u128_fallback;
1103FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_unpcklps_u128, iemAImpl_unpcklpd_u128;
1104FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_unpckhps_u128, iemAImpl_unpckhpd_u128;
1105FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phminposuw_u128, iemAImpl_phminposuw_u128_fallback;
1106
1107FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpshufb_u128, iemAImpl_vpshufb_u128_fallback;
1108FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpand_u128, iemAImpl_vpand_u128_fallback;
1109FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpandn_u128, iemAImpl_vpandn_u128_fallback;
1110FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpor_u128, iemAImpl_vpor_u128_fallback;
1111FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpxor_u128, iemAImpl_vpxor_u128_fallback;
1112FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqb_u128, iemAImpl_vpcmpeqb_u128_fallback;
1113FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqw_u128, iemAImpl_vpcmpeqw_u128_fallback;
1114FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqd_u128, iemAImpl_vpcmpeqd_u128_fallback;
1115FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqq_u128, iemAImpl_vpcmpeqq_u128_fallback;
1116FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtb_u128, iemAImpl_vpcmpgtb_u128_fallback;
1117FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtw_u128, iemAImpl_vpcmpgtw_u128_fallback;
1118FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtd_u128, iemAImpl_vpcmpgtd_u128_fallback;
1119FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtq_u128, iemAImpl_vpcmpgtq_u128_fallback;
1120FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddb_u128, iemAImpl_vpaddb_u128_fallback;
1121FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddw_u128, iemAImpl_vpaddw_u128_fallback;
1122FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddd_u128, iemAImpl_vpaddd_u128_fallback;
1123FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddq_u128, iemAImpl_vpaddq_u128_fallback;
1124FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubb_u128, iemAImpl_vpsubb_u128_fallback;
1125FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubw_u128, iemAImpl_vpsubw_u128_fallback;
1126FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubd_u128, iemAImpl_vpsubd_u128_fallback;
1127FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubq_u128, iemAImpl_vpsubq_u128_fallback;
1128FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminub_u128, iemAImpl_vpminub_u128_fallback;
1129FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminuw_u128, iemAImpl_vpminuw_u128_fallback;
1130FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminud_u128, iemAImpl_vpminud_u128_fallback;
1131FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsb_u128, iemAImpl_vpminsb_u128_fallback;
1132FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsw_u128, iemAImpl_vpminsw_u128_fallback;
1133FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsd_u128, iemAImpl_vpminsd_u128_fallback;
1134FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxub_u128, iemAImpl_vpmaxub_u128_fallback;
1135FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxuw_u128, iemAImpl_vpmaxuw_u128_fallback;
1136FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxud_u128, iemAImpl_vpmaxud_u128_fallback;
1137FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsb_u128, iemAImpl_vpmaxsb_u128_fallback;
1138FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsw_u128, iemAImpl_vpmaxsw_u128_fallback;
1139FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsd_u128, iemAImpl_vpmaxsd_u128_fallback;
1140FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpacksswb_u128, iemAImpl_vpacksswb_u128_fallback;
1141FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackssdw_u128, iemAImpl_vpackssdw_u128_fallback;
1142FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackuswb_u128, iemAImpl_vpackuswb_u128_fallback;
1143FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackusdw_u128, iemAImpl_vpackusdw_u128_fallback;
1144FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmullw_u128, iemAImpl_vpmullw_u128_fallback;
1145FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulld_u128, iemAImpl_vpmulld_u128_fallback;
1146FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhw_u128, iemAImpl_vpmulhw_u128_fallback;
1147FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhuw_u128, iemAImpl_vpmulhuw_u128_fallback;
1148FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpavgb_u128, iemAImpl_vpavgb_u128_fallback;
1149FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpavgw_u128, iemAImpl_vpavgw_u128_fallback;
1150FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignb_u128, iemAImpl_vpsignb_u128_fallback;
1151FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignw_u128, iemAImpl_vpsignw_u128_fallback;
1152FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignd_u128, iemAImpl_vpsignd_u128_fallback;
1153FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddw_u128, iemAImpl_vphaddw_u128_fallback;
1154FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddd_u128, iemAImpl_vphaddd_u128_fallback;
1155FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubw_u128, iemAImpl_vphsubw_u128_fallback;
1156FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubd_u128, iemAImpl_vphsubd_u128_fallback;
1157FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddsw_u128, iemAImpl_vphaddsw_u128_fallback;
1158FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubsw_u128, iemAImpl_vphsubsw_u128_fallback;
1159FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaddubsw_u128, iemAImpl_vpmaddubsw_u128_fallback;
1160FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhrsw_u128, iemAImpl_vpmulhrsw_u128_fallback;
1161FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsadbw_u128, iemAImpl_vpsadbw_u128_fallback;
1162FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmuldq_u128, iemAImpl_vpmuldq_u128_fallback;
1163FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmuludq_u128, iemAImpl_vpmuludq_u128_fallback;
1164FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubsb_u128, iemAImpl_vpsubsb_u128_fallback;
1165FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubsw_u128, iemAImpl_vpsubsw_u128_fallback;
1166FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubusb_u128, iemAImpl_vpsubusb_u128_fallback;
1167FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubusw_u128, iemAImpl_vpsubusw_u128_fallback;
1168FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddusb_u128, iemAImpl_vpaddusb_u128_fallback;
1169FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddusw_u128, iemAImpl_vpaddusw_u128_fallback;
1170FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddsb_u128, iemAImpl_vpaddsb_u128_fallback;
1171FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddsw_u128, iemAImpl_vpaddsw_u128_fallback;
1172FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllw_u128, iemAImpl_vpsllw_u128_fallback;
1173FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpslld_u128, iemAImpl_vpslld_u128_fallback;
1174FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllq_u128, iemAImpl_vpsllq_u128_fallback;
1175FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsraw_u128, iemAImpl_vpsraw_u128_fallback;
1176FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrad_u128, iemAImpl_vpsrad_u128_fallback;
1177FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlw_u128, iemAImpl_vpsrlw_u128_fallback;
1178FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrld_u128, iemAImpl_vpsrld_u128_fallback;
1179FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlq_u128, iemAImpl_vpsrlq_u128_fallback;
1180FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaddwd_u128, iemAImpl_vpmaddwd_u128_fallback;
1181
1182FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsb_u128, iemAImpl_vpabsb_u128_fallback;
1183FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsw_u128, iemAImpl_vpabsw_u128_fallback;
1184FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsd_u128, iemAImpl_vpabsd_u128_fallback;
1185FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback;
1186
1187FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpshufb_u256, iemAImpl_vpshufb_u256_fallback;
1188FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpand_u256, iemAImpl_vpand_u256_fallback;
1189FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpandn_u256, iemAImpl_vpandn_u256_fallback;
1190FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpor_u256, iemAImpl_vpor_u256_fallback;
1191FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpxor_u256, iemAImpl_vpxor_u256_fallback;
1192FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqb_u256, iemAImpl_vpcmpeqb_u256_fallback;
1193FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqw_u256, iemAImpl_vpcmpeqw_u256_fallback;
1194FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqd_u256, iemAImpl_vpcmpeqd_u256_fallback;
1195FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqq_u256, iemAImpl_vpcmpeqq_u256_fallback;
1196FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtb_u256, iemAImpl_vpcmpgtb_u256_fallback;
1197FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtw_u256, iemAImpl_vpcmpgtw_u256_fallback;
1198FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtd_u256, iemAImpl_vpcmpgtd_u256_fallback;
1199FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtq_u256, iemAImpl_vpcmpgtq_u256_fallback;
1200FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddb_u256, iemAImpl_vpaddb_u256_fallback;
1201FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddw_u256, iemAImpl_vpaddw_u256_fallback;
1202FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddd_u256, iemAImpl_vpaddd_u256_fallback;
1203FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddq_u256, iemAImpl_vpaddq_u256_fallback;
1204FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubb_u256, iemAImpl_vpsubb_u256_fallback;
1205FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubw_u256, iemAImpl_vpsubw_u256_fallback;
1206FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubd_u256, iemAImpl_vpsubd_u256_fallback;
1207FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubq_u256, iemAImpl_vpsubq_u256_fallback;
1208FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminub_u256, iemAImpl_vpminub_u256_fallback;
1209FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminuw_u256, iemAImpl_vpminuw_u256_fallback;
1210FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminud_u256, iemAImpl_vpminud_u256_fallback;
1211FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsb_u256, iemAImpl_vpminsb_u256_fallback;
1212FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsw_u256, iemAImpl_vpminsw_u256_fallback;
1213FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsd_u256, iemAImpl_vpminsd_u256_fallback;
1214FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxub_u256, iemAImpl_vpmaxub_u256_fallback;
1215FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxuw_u256, iemAImpl_vpmaxuw_u256_fallback;
1216FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxud_u256, iemAImpl_vpmaxud_u256_fallback;
1217FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsb_u256, iemAImpl_vpmaxsb_u256_fallback;
1218FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsw_u256, iemAImpl_vpmaxsw_u256_fallback;
1219FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsd_u256, iemAImpl_vpmaxsd_u256_fallback;
1220FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpacksswb_u256, iemAImpl_vpacksswb_u256_fallback;
1221FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackssdw_u256, iemAImpl_vpackssdw_u256_fallback;
1222FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackuswb_u256, iemAImpl_vpackuswb_u256_fallback;
1223FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackusdw_u256, iemAImpl_vpackusdw_u256_fallback;
1224FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmullw_u256, iemAImpl_vpmullw_u256_fallback;
1225FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulld_u256, iemAImpl_vpmulld_u256_fallback;
1226FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhw_u256, iemAImpl_vpmulhw_u256_fallback;
1227FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhuw_u256, iemAImpl_vpmulhuw_u256_fallback;
1228FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpavgb_u256, iemAImpl_vpavgb_u256_fallback;
1229FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpavgw_u256, iemAImpl_vpavgw_u256_fallback;
1230FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignb_u256, iemAImpl_vpsignb_u256_fallback;
1231FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignw_u256, iemAImpl_vpsignw_u256_fallback;
1232FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignd_u256, iemAImpl_vpsignd_u256_fallback;
1233FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddw_u256, iemAImpl_vphaddw_u256_fallback;
1234FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddd_u256, iemAImpl_vphaddd_u256_fallback;
1235FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubw_u256, iemAImpl_vphsubw_u256_fallback;
1236FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubd_u256, iemAImpl_vphsubd_u256_fallback;
1237FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddsw_u256, iemAImpl_vphaddsw_u256_fallback;
1238FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubsw_u256, iemAImpl_vphsubsw_u256_fallback;
1239FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaddubsw_u256, iemAImpl_vpmaddubsw_u256_fallback;
1240FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhrsw_u256, iemAImpl_vpmulhrsw_u256_fallback;
1241FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsadbw_u256, iemAImpl_vpsadbw_u256_fallback;
1242FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmuldq_u256, iemAImpl_vpmuldq_u256_fallback;
1243FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmuludq_u256, iemAImpl_vpmuludq_u256_fallback;
1244FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubsb_u256, iemAImpl_vpsubsb_u256_fallback;
1245FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubsw_u256, iemAImpl_vpsubsw_u256_fallback;
1246FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubusb_u256, iemAImpl_vpsubusb_u256_fallback;
1247FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubusw_u256, iemAImpl_vpsubusw_u256_fallback;
1248FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddusb_u256, iemAImpl_vpaddusb_u256_fallback;
1249FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddusw_u256, iemAImpl_vpaddusw_u256_fallback;
1250FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddsb_u256, iemAImpl_vpaddsb_u256_fallback;
1251FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddsw_u256, iemAImpl_vpaddsw_u256_fallback;
1252FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllw_u256, iemAImpl_vpsllw_u256_fallback;
1253FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpslld_u256, iemAImpl_vpslld_u256_fallback;
1254FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllq_u256, iemAImpl_vpsllq_u256_fallback;
1255FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsraw_u256, iemAImpl_vpsraw_u256_fallback;
1256FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrad_u256, iemAImpl_vpsrad_u256_fallback;
1257FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlw_u256, iemAImpl_vpsrlw_u256_fallback;
1258FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrld_u256, iemAImpl_vpsrld_u256_fallback;
1259FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlq_u256, iemAImpl_vpsrlq_u256_fallback;
1260FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaddwd_u256, iemAImpl_vpmaddwd_u256_fallback;
1261FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermps_u256, iemAImpl_vpermps_u256_fallback;
1262FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermd_u256, iemAImpl_vpermd_u256_fallback;
1263
1264FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsb_u256, iemAImpl_vpabsb_u256_fallback;
1265FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsw_u256, iemAImpl_vpabsw_u256_fallback;
1266FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsd_u256, iemAImpl_vpabsd_u256_fallback;
1267/** @} */
1268
1269/** @name Media (SSE/MMX/AVX) operations: lowhalf1 + lowhalf2 -> full1.
1270 * @{ */
1271FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_punpcklbw_u64, iemAImpl_punpcklwd_u64, iemAImpl_punpckldq_u64;
1272FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_punpcklbw_u128, iemAImpl_punpcklwd_u128, iemAImpl_punpckldq_u128, iemAImpl_punpcklqdq_u128;
1273FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpunpcklbw_u128, iemAImpl_vpunpcklbw_u128_fallback,
1274 iemAImpl_vpunpcklwd_u128, iemAImpl_vpunpcklwd_u128_fallback,
1275 iemAImpl_vpunpckldq_u128, iemAImpl_vpunpckldq_u128_fallback,
1276 iemAImpl_vpunpcklqdq_u128, iemAImpl_vpunpcklqdq_u128_fallback,
1277 iemAImpl_vunpcklps_u128, iemAImpl_vunpcklps_u128_fallback,
1278 iemAImpl_vunpcklpd_u128, iemAImpl_vunpcklpd_u128_fallback,
1279 iemAImpl_vunpckhps_u128, iemAImpl_vunpckhps_u128_fallback,
1280 iemAImpl_vunpckhpd_u128, iemAImpl_vunpckhpd_u128_fallback;
1281
1282FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpunpcklbw_u256, iemAImpl_vpunpcklbw_u256_fallback,
1283 iemAImpl_vpunpcklwd_u256, iemAImpl_vpunpcklwd_u256_fallback,
1284 iemAImpl_vpunpckldq_u256, iemAImpl_vpunpckldq_u256_fallback,
1285 iemAImpl_vpunpcklqdq_u256, iemAImpl_vpunpcklqdq_u256_fallback,
1286 iemAImpl_vunpcklps_u256, iemAImpl_vunpcklps_u256_fallback,
1287 iemAImpl_vunpcklpd_u256, iemAImpl_vunpcklpd_u256_fallback,
1288 iemAImpl_vunpckhps_u256, iemAImpl_vunpckhps_u256_fallback,
1289 iemAImpl_vunpckhpd_u256, iemAImpl_vunpckhpd_u256_fallback;
1290/** @} */
1291
1292/** @name Media (SSE/MMX/AVX) operations: hihalf1 + hihalf2 -> full1.
1293 * @{ */
1294FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_punpckhbw_u64, iemAImpl_punpckhwd_u64, iemAImpl_punpckhdq_u64;
1295FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_punpckhbw_u128, iemAImpl_punpckhwd_u128, iemAImpl_punpckhdq_u128, iemAImpl_punpckhqdq_u128;
1296FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpunpckhbw_u128, iemAImpl_vpunpckhbw_u128_fallback,
1297 iemAImpl_vpunpckhwd_u128, iemAImpl_vpunpckhwd_u128_fallback,
1298 iemAImpl_vpunpckhdq_u128, iemAImpl_vpunpckhdq_u128_fallback,
1299 iemAImpl_vpunpckhqdq_u128, iemAImpl_vpunpckhqdq_u128_fallback;
1300FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpunpckhbw_u256, iemAImpl_vpunpckhbw_u256_fallback,
1301 iemAImpl_vpunpckhwd_u256, iemAImpl_vpunpckhwd_u256_fallback,
1302 iemAImpl_vpunpckhdq_u256, iemAImpl_vpunpckhdq_u256_fallback,
1303 iemAImpl_vpunpckhqdq_u256, iemAImpl_vpunpckhqdq_u256_fallback;
1304/** @} */
1305
1306/** @name Media (SSE/MMX/AVX) operation: Packed Shuffle Stuff (evil)
1307 * @{ */
1308typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUFU128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
1309typedef FNIEMAIMPLMEDIAPSHUFU128 *PFNIEMAIMPLMEDIAPSHUFU128;
1310typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUFU256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil));
1311typedef FNIEMAIMPLMEDIAPSHUFU256 *PFNIEMAIMPLMEDIAPSHUFU256;
1312IEM_DECL_IMPL_DEF(void, iemAImpl_pshufw_u64,(uint64_t *puDst, uint64_t const *puSrc, uint8_t bEvil));
1313FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_pshufhw_u128, iemAImpl_pshuflw_u128, iemAImpl_pshufd_u128;
1314#ifndef IEM_WITHOUT_ASSEMBLY
1315FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpshufhw_u256, iemAImpl_vpshuflw_u256, iemAImpl_vpshufd_u256;
1316#endif
1317FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpshufhw_u256_fallback, iemAImpl_vpshuflw_u256_fallback, iemAImpl_vpshufd_u256_fallback;
1318/** @} */
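/*
 * Illustrative sketch (not part of the build): what a PSHUFD-style worker
 * behind FNIEMAIMPLMEDIAPSHUFU128 has to do with the 'bEvil' immediate -
 * each 2-bit field of the immediate selects one source dword.  The helper
 * name below is hypothetical and only demonstrates the calling convention.
 *
 *      static void iemAImplSketch_pshufd(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil)
 *      {
 *          RTUINT128U const uSrc = *puSrc;              // copy first, puDst may alias puSrc
 *          puDst->au32[0] = uSrc.au32[ bEvil       & 3];
 *          puDst->au32[1] = uSrc.au32[(bEvil >> 2) & 3];
 *          puDst->au32[2] = uSrc.au32[(bEvil >> 4) & 3];
 *          puDst->au32[3] = uSrc.au32[(bEvil >> 6) & 3];
 *      }
 */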
1319
1320/** @name Media (SSE/MMX/AVX) operation: Shift Immediate Stuff (evil)
1321 * @{ */
1322typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU64,(uint64_t *puDst, uint8_t bShift));
1323typedef FNIEMAIMPLMEDIAPSHIFTU64 *PFNIEMAIMPLMEDIAPSHIFTU64;
1324typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU128,(PRTUINT128U puDst, uint8_t bShift));
1325typedef FNIEMAIMPLMEDIAPSHIFTU128 *PFNIEMAIMPLMEDIAPSHIFTU128;
1326typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU256,(PRTUINT256U puDst, uint8_t bShift));
1327typedef FNIEMAIMPLMEDIAPSHIFTU256 *PFNIEMAIMPLMEDIAPSHIFTU256;
1328FNIEMAIMPLMEDIAPSHIFTU64 iemAImpl_psllw_imm_u64, iemAImpl_pslld_imm_u64, iemAImpl_psllq_imm_u64;
1329FNIEMAIMPLMEDIAPSHIFTU64 iemAImpl_psrlw_imm_u64, iemAImpl_psrld_imm_u64, iemAImpl_psrlq_imm_u64;
1330FNIEMAIMPLMEDIAPSHIFTU64 iemAImpl_psraw_imm_u64, iemAImpl_psrad_imm_u64;
1331FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psllw_imm_u128, iemAImpl_pslld_imm_u128, iemAImpl_psllq_imm_u128;
1332FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psrlw_imm_u128, iemAImpl_psrld_imm_u128, iemAImpl_psrlq_imm_u128;
1333FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psraw_imm_u128, iemAImpl_psrad_imm_u128;
1334FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_pslldq_imm_u128, iemAImpl_psrldq_imm_u128;
1335/** @} */
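/*
 * Illustrative sketch (not part of the build): the byte-granular PSRLDQ shift
 * behind iemAImpl_psrldq_imm_u128 operates in place on the 128-bit destination
 * and clears the register for shift counts of 16 or more.  Hypothetical helper,
 * shown only to clarify the single-operand shift typedefs above.
 *
 *      static void iemAImplSketch_psrldq_imm(PRTUINT128U puDst, uint8_t bShift)
 *      {
 *          for (unsigned i = 0; i < 16; i++)
 *              puDst->au8[i] = i + bShift < 16 ? puDst->au8[i + bShift] : 0;
 *      }
 */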
1336
1337/** @name Media (SSE/MMX/AVX) operation: Move Byte Mask
1338 * @{ */
1339IEM_DECL_IMPL_DEF(void, iemAImpl_maskmovq_u64,(uint64_t *puMem, uint64_t const *puSrc, uint64_t const *puMsk));
1340IEM_DECL_IMPL_DEF(void, iemAImpl_maskmovdqu_u128,(PRTUINT128U puMem, PCRTUINT128U puSrc, PCRTUINT128U puMsk));
1341IEM_DECL_IMPL_DEF(void, iemAImpl_pmovmskb_u64,(uint64_t *pu64Dst, uint64_t const *puSrc));
1342IEM_DECL_IMPL_DEF(void, iemAImpl_pmovmskb_u128,(uint64_t *pu64Dst, PCRTUINT128U puSrc));
1343#ifndef IEM_WITHOUT_ASSEMBLY
1344IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovmskb_u256,(uint64_t *pu64Dst, PCRTUINT256U puSrc));
1345#endif
1346IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovmskb_u256_fallback,(uint64_t *pu64Dst, PCRTUINT256U puSrc));
1347/** @} */
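/*
 * Illustrative sketch (not part of the build): PMOVMSKB collects the most
 * significant bit of each source byte into the low bits of the destination
 * GPR, which is why the iemAImpl_pmovmskb_u128 declaration above takes a
 * uint64_t destination.  Hypothetical helper for clarification only.
 *
 *      static void iemAImplSketch_pmovmskb_u128(uint64_t *pu64Dst, PCRTUINT128U puSrc)
 *      {
 *          uint64_t fMask = 0;
 *          for (unsigned i = 0; i < 16; i++)
 *              fMask |= (uint64_t)(puSrc->au8[i] >> 7) << i;
 *          *pu64Dst = fMask;
 *      }
 */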
1348
1349/** @name Media (SSE/MMX/AVX) operations: Variable Blend Packed Bytes/R32/R64.
1350 * @{ */
1351typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBLENDU128,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puMask));
1352typedef FNIEMAIMPLBLENDU128 *PFNIEMAIMPLBLENDU128;
1353typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLAVXBLENDU128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, PCRTUINT128U puMask));
1354typedef FNIEMAIMPLAVXBLENDU128 *PFNIEMAIMPLAVXBLENDU128;
1355typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLAVXBLENDU256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, PCRTUINT256U puMask));
1356typedef FNIEMAIMPLAVXBLENDU256 *PFNIEMAIMPLAVXBLENDU256;
1357
1358FNIEMAIMPLBLENDU128 iemAImpl_pblendvb_u128;
1359FNIEMAIMPLBLENDU128 iemAImpl_pblendvb_u128_fallback;
1360FNIEMAIMPLAVXBLENDU128 iemAImpl_vpblendvb_u128;
1361FNIEMAIMPLAVXBLENDU128 iemAImpl_vpblendvb_u128_fallback;
1362FNIEMAIMPLAVXBLENDU256 iemAImpl_vpblendvb_u256;
1363FNIEMAIMPLAVXBLENDU256 iemAImpl_vpblendvb_u256_fallback;
1364
1365FNIEMAIMPLBLENDU128 iemAImpl_blendvps_u128;
1366FNIEMAIMPLBLENDU128 iemAImpl_blendvps_u128_fallback;
1367FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvps_u128;
1368FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvps_u128_fallback;
1369FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvps_u256;
1370FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvps_u256_fallback;
1371
1372FNIEMAIMPLBLENDU128 iemAImpl_blendvpd_u128;
1373FNIEMAIMPLBLENDU128 iemAImpl_blendvpd_u128_fallback;
1374FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvpd_u128;
1375FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvpd_u128_fallback;
1376FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvpd_u256;
1377FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvpd_u256_fallback;
1378/** @} */
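/*
 * Illustrative sketch (not part of the build): the variable blends select each
 * destination element based on the most significant bit of the corresponding
 * mask element (byte granularity for [V]PBLENDVB, dword/qword granularity for
 * [V]BLENDVPS/[V]BLENDVPD).  Hypothetical helper matching the
 * FNIEMAIMPLBLENDU128 shape above.
 *
 *      static void iemAImplSketch_pblendvb(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puMask)
 *      {
 *          for (unsigned i = 0; i < 16; i++)
 *              if (puMask->au8[i] & 0x80)
 *                  puDst->au8[i] = puSrc->au8[i];
 *      }
 */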
1379
1380
1381/** @name Media (SSE/MMX/AVX) operation: Sort this later
1382 * @{ */
1383IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
1384IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
1385IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1386IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1387IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1388
1389IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
1390IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
1391IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
1392IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1393IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1394
1395IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
1396IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
1397IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u128_fallback,(PRTUINT128U puDst, uint16_t uSrc));
1398IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1399IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1400
1401IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
1402IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
1403IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1404IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1405IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1406
1407IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
1408IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
1409IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
1410IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1411IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1412
1413IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
1414IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
1415IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1416IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1417IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1418
1419IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
1420IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
1421IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1422IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1423IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1424
1425IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
1426IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
1427IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
1428IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1429IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1430
1431IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
1432IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
1433IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u128_fallback,(PRTUINT128U puDst, uint16_t uSrc));
1434IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1435IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1436
1437IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
1438IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
1439IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1440IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1441IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1442
1443IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
1444IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
1445IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
1446IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1447IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1448
1449IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
1450IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
1451IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
1452IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1453IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
1454
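/*
 * Illustrative sketch (not part of the build): the narrow [V]PMOVSX/ZX forms
 * above receive the packed source as a plain integer (e.g. eight bytes in a
 * uint64_t for PMOVZXBW) and widen it into the 128-bit destination.
 * Hypothetical helper for the zero-extending byte-to-word case.
 *
 *      static void iemAImplSketch_pmovzxbw(PRTUINT128U puDst, uint64_t uSrc)
 *      {
 *          for (unsigned i = 0; i < 8; i++)
 *              puDst->au16[i] = (uint8_t)(uSrc >> (i * 8));
 *      }
 */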
1455IEM_DECL_IMPL_DEF(void, iemAImpl_shufpd_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
1456IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
1457IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
1458IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
1459IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
1460
1461IEM_DECL_IMPL_DEF(void, iemAImpl_shufps_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
1462IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
1463IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
1464IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
1465IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
1466
1467IEM_DECL_IMPL_DEF(void, iemAImpl_palignr_u64,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t bEvil));
1468IEM_DECL_IMPL_DEF(void, iemAImpl_palignr_u64_fallback,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t bEvil));
1469
1470IEM_DECL_IMPL_DEF(void, iemAImpl_movmskps_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1471IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1472IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u128_fallback,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1473IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u256,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
1474IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u256_fallback,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
1475
1476IEM_DECL_IMPL_DEF(void, iemAImpl_movmskpd_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1477IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1478IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u128_fallback,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
1479IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u256,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
1480IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u256_fallback,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
1481
1482
1483typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U128IMM8,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
1484typedef FNIEMAIMPLMEDIAOPTF2U128IMM8 *PFNIEMAIMPLMEDIAOPTF2U128IMM8;
1485typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U256IMM8,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil));
1486typedef FNIEMAIMPLMEDIAOPTF2U256IMM8 *PFNIEMAIMPLMEDIAOPTF2U256IMM8;
1487typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U128IMM8,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
1488typedef FNIEMAIMPLMEDIAOPTF3U128IMM8 *PFNIEMAIMPLMEDIAOPTF3U128IMM8;
1489typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U256IMM8,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
1490typedef FNIEMAIMPLMEDIAOPTF3U256IMM8 *PFNIEMAIMPLMEDIAOPTF3U256IMM8;
1491
1492FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_palignr_u128, iemAImpl_palignr_u128_fallback;
1493FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_pblendw_u128, iemAImpl_pblendw_u128_fallback;
1494FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_blendps_u128, iemAImpl_blendps_u128_fallback;
1495FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_blendpd_u128, iemAImpl_blendpd_u128_fallback;
1496
1497FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpalignr_u128, iemAImpl_vpalignr_u128_fallback;
1498FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpblendw_u128, iemAImpl_vpblendw_u128_fallback;
1499FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpblendd_u128, iemAImpl_vpblendd_u128_fallback;
1500FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vblendps_u128, iemAImpl_vblendps_u128_fallback;
1501FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vblendpd_u128, iemAImpl_vblendpd_u128_fallback;
1502
1503FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpalignr_u256, iemAImpl_vpalignr_u256_fallback;
1504FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpblendw_u256, iemAImpl_vpblendw_u256_fallback;
1505FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpblendd_u256, iemAImpl_vpblendd_u256_fallback;
1506FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vblendps_u256, iemAImpl_vblendps_u256_fallback;
1507FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vblendpd_u256, iemAImpl_vblendpd_u256_fallback;
1508FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback;
1509FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback;
1510
1511FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesimc_u128, iemAImpl_aesimc_u128_fallback;
1512FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesenc_u128, iemAImpl_aesenc_u128_fallback;
1513FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesenclast_u128, iemAImpl_aesenclast_u128_fallback;
1514FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesdec_u128, iemAImpl_aesdec_u128_fallback;
1515FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesdeclast_u128, iemAImpl_aesdeclast_u128_fallback;
1516
1517FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vaesimc_u128, iemAImpl_vaesimc_u128_fallback;
1518FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesenc_u128, iemAImpl_vaesenc_u128_fallback;
1519FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesenclast_u128, iemAImpl_vaesenclast_u128_fallback;
1520FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesdec_u128, iemAImpl_vaesdec_u128_fallback;
1521FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesdeclast_u128, iemAImpl_vaesdeclast_u128_fallback;
1522
1523FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_aeskeygenassist_u128, iemAImpl_aeskeygenassist_u128_fallback;
1524
1525FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback;
1526
1527FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1nexte_u128, iemAImpl_sha1nexte_u128_fallback;
1528FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1msg1_u128, iemAImpl_sha1msg1_u128_fallback;
1529FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1msg2_u128, iemAImpl_sha1msg2_u128_fallback;
1530FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha256msg1_u128, iemAImpl_sha256msg1_u128_fallback;
1531FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha256msg2_u128, iemAImpl_sha256msg2_u128_fallback;
1532FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_sha1rnds4_u128, iemAImpl_sha1rnds4_u128_fallback;
1533IEM_DECL_IMPL_DEF(void, iemAImpl_sha256rnds2_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puXmm0Constants));
1534IEM_DECL_IMPL_DEF(void, iemAImpl_sha256rnds2_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puXmm0Constants));
1535
1536FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermq_u256, iemAImpl_vpermq_u256_fallback;
1537FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermpd_u256, iemAImpl_vpermpd_u256_fallback;
1538
1539typedef struct IEMPCMPISTRXSRC
1540{
1541 RTUINT128U uSrc1;
1542 RTUINT128U uSrc2;
1543} IEMPCMPISTRXSRC;
1544typedef IEMPCMPISTRXSRC *PIEMPCMPISTRXSRC;
1545typedef const IEMPCMPISTRXSRC *PCIEMPCMPISTRXSRC;
1546
1547typedef struct IEMPCMPESTRXSRC
1548{
1549 RTUINT128U uSrc1;
1550 RTUINT128U uSrc2;
1551 uint64_t u64Rax;
1552 uint64_t u64Rdx;
1553} IEMPCMPESTRXSRC;
1554typedef IEMPCMPESTRXSRC *PIEMPCMPESTRXSRC;
1555typedef const IEMPCMPESTRXSRC *PCIEMPCMPESTRXSRC;
1556
1557typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLPCMPISTRIU128IMM8,(uint32_t *pEFlags, PCRTUINT128U pSrc1, PCRTUINT128U pSrc2, uint8_t bEvil));
1558typedef FNIEMAIMPLPCMPISTRIU128IMM8 *PFNIEMAIMPLPCMPISTRIU128IMM8;
1559typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPESTRIU128IMM8,(uint32_t *pu32Ecx, uint32_t *pEFlags, PCIEMPCMPESTRXSRC pSrc, uint8_t bEvil));
1560typedef FNIEMAIMPLPCMPESTRIU128IMM8 *PFNIEMAIMPLPCMPESTRIU128IMM8;
1561
1562typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPISTRMU128IMM8,(PRTUINT128U puDst, uint32_t *pEFlags, PCIEMPCMPISTRXSRC pSrc, uint8_t bEvil));
1563typedef FNIEMAIMPLPCMPISTRMU128IMM8 *PFNIEMAIMPLPCMPISTRMU128IMM8;
1564typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPESTRMU128IMM8,(PRTUINT128U puDst, uint32_t *pEFlags, PCIEMPCMPESTRXSRC pSrc, uint8_t bEvil));
1565typedef FNIEMAIMPLPCMPESTRMU128IMM8 *PFNIEMAIMPLPCMPESTRMU128IMM8;
1566
1567FNIEMAIMPLPCMPISTRIU128IMM8 iemAImpl_pcmpistri_u128, iemAImpl_pcmpistri_u128_fallback;
1568FNIEMAIMPLPCMPESTRIU128IMM8 iemAImpl_pcmpestri_u128, iemAImpl_pcmpestri_u128_fallback;
1569FNIEMAIMPLPCMPISTRMU128IMM8 iemAImpl_pcmpistrm_u128, iemAImpl_pcmpistrm_u128_fallback;
1570FNIEMAIMPLPCMPESTRMU128IMM8 iemAImpl_pcmpestrm_u128, iemAImpl_pcmpestrm_u128_fallback;
1571FNIEMAIMPLPCMPISTRIU128IMM8 iemAImpl_vpcmpistri_u128, iemAImpl_vpcmpistri_u128_fallback;
1572FNIEMAIMPLPCMPESTRIU128IMM8 iemAImpl_vpcmpestri_u128, iemAImpl_vpcmpestri_u128_fallback;
1573FNIEMAIMPLPCMPISTRMU128IMM8 iemAImpl_vpcmpistrm_u128, iemAImpl_vpcmpistrm_u128_fallback;
1574FNIEMAIMPLPCMPESTRMU128IMM8 iemAImpl_vpcmpestrm_u128, iemAImpl_vpcmpestrm_u128_fallback;
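/*
 * Illustrative usage sketch (not part of the build): the explicit-length
 * string compares get their RAX/RDX length inputs via the IEMPCMPESTRXSRC
 * wrapper, so a caller is expected to do roughly the following (variable
 * names are hypothetical):
 *
 *      IEMPCMPESTRXSRC Src;
 *      Src.uSrc1  = uXmmDst;                   // first operand (xmm register)
 *      Src.uSrc2  = uXmmSrc;                   // second operand (xmm/mem)
 *      Src.u64Rax = uRax;                      // length of the first operand
 *      Src.u64Rdx = uRdx;                      // length of the second operand
 *      iemAImpl_pcmpestri_u128(&u32Ecx, &fEFlags, &Src, bImm8);
 */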
1575
1576
1577FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_pclmulqdq_u128, iemAImpl_pclmulqdq_u128_fallback;
1578FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback;
1579
1580FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_mpsadbw_u128, iemAImpl_mpsadbw_u128_fallback;
1581FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vmpsadbw_u128, iemAImpl_vmpsadbw_u128_fallback;
1582FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vmpsadbw_u256, iemAImpl_vmpsadbw_u256_fallback;
1583
1584FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback;
1585FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback;
1586FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback;
1587FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback;
1588FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback;
1589FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback;
1590IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
1591IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
1592IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
1593IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
1594
1595FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback;
1596FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback;
1597FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback;
1598FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback;
1599
1600FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback;
1601FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback;
1602FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback;
1603FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback;
1604FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback;
1605FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback;
1606IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
1607IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
1608IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
1609IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
1610
1611FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpermilps_u128, iemAImpl_vpermilps_u128_fallback;
1612FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vpermilps_imm_u128, iemAImpl_vpermilps_imm_u128_fallback;
1613FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermilps_u256, iemAImpl_vpermilps_u256_fallback;
1614FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermilps_imm_u256, iemAImpl_vpermilps_imm_u256_fallback;
1615
1616FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpermilpd_u128, iemAImpl_vpermilpd_u128_fallback;
1617FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vpermilpd_imm_u128, iemAImpl_vpermilpd_imm_u128_fallback;
1618FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermilpd_u256, iemAImpl_vpermilpd_u256_fallback;
1619FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermilpd_imm_u256, iemAImpl_vpermilpd_imm_u256_fallback;
1620
1621FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllvd_u128, iemAImpl_vpsllvd_u128_fallback;
1622FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllvd_u256, iemAImpl_vpsllvd_u256_fallback;
1623FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllvq_u128, iemAImpl_vpsllvq_u128_fallback;
1624FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllvq_u256, iemAImpl_vpsllvq_u256_fallback;
1625FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsravd_u128, iemAImpl_vpsravd_u128_fallback;
1626FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsravd_u256, iemAImpl_vpsravd_u256_fallback;
1627FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlvd_u128, iemAImpl_vpsrlvd_u128_fallback;
1628FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlvd_u256, iemAImpl_vpsrlvd_u256_fallback;
1629FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlvq_u128, iemAImpl_vpsrlvq_u128_fallback;
1630FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlvq_u256, iemAImpl_vpsrlvq_u256_fallback;
1631/** @} */
1632
1633/** @name Media Odds and Ends
1634 * @{ */
1635typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U8,(uint32_t *puDst, uint8_t uSrc));
1636typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U16,(uint32_t *puDst, uint16_t uSrc));
1637typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U32,(uint32_t *puDst, uint32_t uSrc));
1638typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U64,(uint32_t *puDst, uint64_t uSrc));
1639FNIEMAIMPLCR32U8 iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback;
1640FNIEMAIMPLCR32U16 iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback;
1641FNIEMAIMPLCR32U32 iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback;
1642FNIEMAIMPLCR32U64 iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback;
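/*
 * Illustrative usage sketch (not part of the build): the CRC32 workers update
 * the 32-bit accumulator in place, so emulating the instruction over a buffer
 * is just a matter of feeding it one source unit at a time (hypothetical
 * variables):
 *
 *      uint32_t uCrc = uCrcInitial;
 *      for (size_t off = 0; off < cbBuf; off++)
 *          iemAImpl_crc32_u8(&uCrc, pbBuf[off]);
 */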
1643
1644typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLF2EFL128,(PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint32_t *pEFlags));
1645typedef FNIEMAIMPLF2EFL128 *PFNIEMAIMPLF2EFL128;
1646typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLF2EFL256,(PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint32_t *pEFlags));
1647typedef FNIEMAIMPLF2EFL256 *PFNIEMAIMPLF2EFL256;
1648FNIEMAIMPLF2EFL128 iemAImpl_ptest_u128;
1649FNIEMAIMPLF2EFL256 iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback;
1650FNIEMAIMPLF2EFL128 iemAImpl_vtestps_u128, iemAImpl_vtestps_u128_fallback;
1651FNIEMAIMPLF2EFL256 iemAImpl_vtestps_u256, iemAImpl_vtestps_u256_fallback;
1652FNIEMAIMPLF2EFL128 iemAImpl_vtestpd_u128, iemAImpl_vtestpd_u128_fallback;
1653FNIEMAIMPLF2EFL256 iemAImpl_vtestpd_u256, iemAImpl_vtestpd_u256_fallback;
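/*
 * Illustrative sketch (not part of the build): [V]PTEST only produces EFLAGS.
 * Assuming puSrc1 is the destination-register operand and puSrc2 the r/m
 * operand, the logic is roughly ZF = ((Src1 & Src2) == 0) and
 * CF = ((~Src1 & Src2) == 0) with the remaining arithmetic flags cleared.
 * Hypothetical helper, qword-wise for brevity:
 *
 *      static void iemAImplSketch_ptest(PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint32_t *pfEFlags)
 *      {
 *          uint64_t const fAnd    = (puSrc1->au64[0] & puSrc2->au64[0]) | (puSrc1->au64[1] & puSrc2->au64[1]);
 *          uint64_t const fAndNot = (~puSrc1->au64[0] & puSrc2->au64[0]) | (~puSrc1->au64[1] & puSrc2->au64[1]);
 *          *pfEFlags &= ~(uint32_t)(X86_EFL_ZF | X86_EFL_CF | X86_EFL_AF | X86_EFL_SF | X86_EFL_OF | X86_EFL_PF);
 *          if (!fAnd)
 *              *pfEFlags |= X86_EFL_ZF;
 *          if (!fAndNot)
 *              *pfEFlags |= X86_EFL_CF;
 *      }
 */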
1654
1655typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32U64,(uint32_t uMxCsrIn, int32_t *pi32Dst, const uint64_t *pu64Src)); /* pu64Src is a double precision floating point value. */
1656typedef FNIEMAIMPLSSEF2I32U64 *PFNIEMAIMPLSSEF2I32U64;
1657typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64U64,(uint32_t uMxCsrIn, int64_t *pi64Dst, const uint64_t *pu64Src)); /* pu64Src is a double precision floating point value. */
1658typedef FNIEMAIMPLSSEF2I64U64 *PFNIEMAIMPLSSEF2I64U64;
1659typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32U32,(uint32_t uMxCsrIn, int32_t *pi32Dst, const uint32_t *pu32Src)); /* pu32Src is a single precision floating point value. */
1660typedef FNIEMAIMPLSSEF2I32U32 *PFNIEMAIMPLSSEF2I32U32;
1661typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64U32,(uint32_t uMxCsrIn, int64_t *pi64Dst, const uint32_t *pu32Src)); /* pu32Src is a single precision floating point value. */
1662typedef FNIEMAIMPLSSEF2I64U32 *PFNIEMAIMPLSSEF2I64U32;
1663typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32R32,(uint32_t uMxCsrIn, int32_t *pi32Dst, PCRTFLOAT32U pr32Src));
1664typedef FNIEMAIMPLSSEF2I32R32 *PFNIEMAIMPLSSEF2I32R32;
1665typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64R32,(uint32_t uMxCsrIn, int64_t *pi64Dst, PCRTFLOAT32U pr32Src));
1666typedef FNIEMAIMPLSSEF2I64R32 *PFNIEMAIMPLSSEF2I64R32;
1667typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32R64,(uint32_t uMxCsrIn, int32_t *pi32Dst, PCRTFLOAT64U pr64Src));
1668typedef FNIEMAIMPLSSEF2I32R64 *PFNIEMAIMPLSSEF2I32R64;
1669typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64R64,(uint32_t uMxCsrIn, int64_t *pi64Dst, PCRTFLOAT64U pr64Src));
1670typedef FNIEMAIMPLSSEF2I64R64 *PFNIEMAIMPLSSEF2I64R64;
1671
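/*
 * Illustrative usage sketch (not part of the build): these SSE conversion
 * workers take the current MXCSR as input and return the updated MXCSR value
 * (with any new exception flags) instead of touching guest state themselves,
 * e.g. for the cvttsd2si worker declared just below (hypothetical variables):
 *
 *      int32_t  i32Dst;
 *      uint32_t fMxCsrOut = iemAImpl_cvttsd2si_i32_r64(fMxCsrIn, &i32Dst, &u64Src);
 */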
1672FNIEMAIMPLSSEF2I32U64 iemAImpl_cvttsd2si_i32_r64;
1673FNIEMAIMPLSSEF2I32U64 iemAImpl_cvtsd2si_i32_r64;
1674
1675FNIEMAIMPLSSEF2I64U64 iemAImpl_cvttsd2si_i64_r64;
1676FNIEMAIMPLSSEF2I64U64 iemAImpl_cvtsd2si_i64_r64;
1677
1678FNIEMAIMPLSSEF2I32U32 iemAImpl_cvttss2si_i32_r32;
1679FNIEMAIMPLSSEF2I32U32 iemAImpl_cvtss2si_i32_r32;
1680
1681FNIEMAIMPLSSEF2I64U32 iemAImpl_cvttss2si_i64_r32;
1682FNIEMAIMPLSSEF2I64U32 iemAImpl_cvtss2si_i64_r32;
1683
1684FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvttss2si_i32_r32, iemAImpl_vcvttss2si_i32_r32_fallback;
1685FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvttss2si_i64_r32, iemAImpl_vcvttss2si_i64_r32_fallback;
1686FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvtss2si_i32_r32, iemAImpl_vcvtss2si_i32_r32_fallback;
1687FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvtss2si_i64_r32, iemAImpl_vcvtss2si_i64_r32_fallback;
1688
1689FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvttss2si_i32_r64, iemAImpl_vcvttss2si_i32_r64_fallback;
1690FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvttss2si_i64_r64, iemAImpl_vcvttss2si_i64_r64_fallback;
1691FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvtss2si_i32_r64, iemAImpl_vcvtss2si_i32_r64_fallback;
1692FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvtss2si_i64_r64, iemAImpl_vcvtss2si_i64_r64_fallback;
1693
1694FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvttsd2si_i32_r32, iemAImpl_vcvttsd2si_i32_r32_fallback;
1695FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvttsd2si_i64_r32, iemAImpl_vcvttsd2si_i64_r32_fallback;
1696FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvtsd2si_i32_r32, iemAImpl_vcvtsd2si_i32_r32_fallback;
1697FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvtsd2si_i64_r32, iemAImpl_vcvtsd2si_i64_r32_fallback;
1698
1699FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvttsd2si_i32_r64, iemAImpl_vcvttsd2si_i32_r64_fallback;
1700FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvttsd2si_i64_r64, iemAImpl_vcvttsd2si_i64_r64_fallback;
1701FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvtsd2si_i32_r64, iemAImpl_vcvtsd2si_i32_r64_fallback;
1702FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvtsd2si_i64_r64, iemAImpl_vcvtsd2si_i64_r64_fallback;
1703
1704typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R32I32,(uint32_t uMxCsrIn, PRTFLOAT32U pr32Dst, const int32_t *pi32Src));
1705typedef FNIEMAIMPLSSEF2R32I32 *PFNIEMAIMPLSSEF2R32I32;
1706typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R32I64,(uint32_t uMxCsrIn, PRTFLOAT32U pr32Dst, const int64_t *pi64Src));
1707typedef FNIEMAIMPLSSEF2R32I64 *PFNIEMAIMPLSSEF2R32I64;
1708
1709FNIEMAIMPLSSEF2R32I32 iemAImpl_cvtsi2ss_r32_i32;
1710FNIEMAIMPLSSEF2R32I64 iemAImpl_cvtsi2ss_r32_i64;
1711
1712typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLAVXF3XMMI32,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, const int32_t *pi32Src));
1713typedef FNIEMAIMPLAVXF3XMMI32 *PFNIEMAIMPLAVXF3XMMI32;
1714typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLAVXF3XMMI64,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, const int64_t *pi64Src));
1715typedef FNIEMAIMPLAVXF3XMMI64 *PFNIEMAIMPLAVXF3XMMI64;
1716
1717FNIEMAIMPLAVXF3XMMI32 iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback;
1718FNIEMAIMPLAVXF3XMMI64 iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback;
1719
1720
1721typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R64I32,(uint32_t uMxCsrIn, PRTFLOAT64U pr64Dst, const int32_t *pi32Src));
1722typedef FNIEMAIMPLSSEF2R64I32 *PFNIEMAIMPLSSEF2R64I32;
1723typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R64I64,(uint32_t uMxCsrIn, PRTFLOAT64U pr64Dst, const int64_t *pi64Src));
1724typedef FNIEMAIMPLSSEF2R64I64 *PFNIEMAIMPLSSEF2R64I64;
1725
1726FNIEMAIMPLSSEF2R64I32 iemAImpl_cvtsi2sd_r64_i32;
1727FNIEMAIMPLSSEF2R64I64 iemAImpl_cvtsi2sd_r64_i64;
1728
1729FNIEMAIMPLAVXF3XMMI32 iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback;
1730FNIEMAIMPLAVXF3XMMI64 iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback;
1731
1732IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u128_u64,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two single precision floating point values. */
1733IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u128_u64_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two single precision floating point values. */
1734IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u256_u128,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
1735IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u256_u128_fallback,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
1736
1737
1738IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u128_u64,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two signed 32-bit integer values. */
1739IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u128_u64_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two signed 32-bit integer values. */
1740IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u256_u128,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
1741IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u256_u128_fallback,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
1742
1743
1744typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLF2EFLMXCSRR32R32,(uint32_t uMxCsrIn, uint32_t *pfEFlags, RTFLOAT32U uSrc1, RTFLOAT32U uSrc2));
1745typedef FNIEMAIMPLF2EFLMXCSRR32R32 *PFNIEMAIMPLF2EFLMXCSRR32R32;
1746
1747typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLF2EFLMXCSRR64R64,(uint32_t uMxCsrIn, uint32_t *pfEFlags, RTFLOAT64U uSrc1, RTFLOAT64U uSrc2));
1748typedef FNIEMAIMPLF2EFLMXCSRR64R64 *PFNIEMAIMPLF2EFLMXCSRR64R64;
1749
1750FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_ucomiss_u128;
1751FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback;
1752
1753FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_ucomisd_u128;
1754FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback;
1755
1756FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_comiss_u128;
1757FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback;
1758
1759FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_comisd_u128;
1760FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback;
1761
1762
1763typedef struct IEMMEDIAF2XMMSRC
1764{
1765 X86XMMREG uSrc1;
1766 X86XMMREG uSrc2;
1767} IEMMEDIAF2XMMSRC;
1768typedef IEMMEDIAF2XMMSRC *PIEMMEDIAF2XMMSRC;
1769typedef const IEMMEDIAF2XMMSRC *PCIEMMEDIAF2XMMSRC;
1770
1771
1772typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3XMMIMM8,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCIEMMEDIAF2XMMSRC puSrc, uint8_t bEvil));
1773typedef FNIEMAIMPLMEDIAF3XMMIMM8 *PFNIEMAIMPLMEDIAF3XMMIMM8;
1774
1775
1776typedef struct IEMMEDIAF2YMMSRC
1777{
1778 X86YMMREG uSrc1;
1779 X86YMMREG uSrc2;
1780} IEMMEDIAF2YMMSRC;
1781typedef IEMMEDIAF2YMMSRC *PIEMMEDIAF2YMMSRC;
1782typedef const IEMMEDIAF2YMMSRC *PCIEMMEDIAF2YMMSRC;
1783
1784
1785typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3YMMIMM8,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCIEMMEDIAF2YMMSRC puSrc, uint8_t bEvil));
1786typedef FNIEMAIMPLMEDIAF3YMMIMM8 *PFNIEMAIMPLMEDIAF3YMMIMM8;
1787
1788
1789FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpps_u128;
1790FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmppd_u128;
1791FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpss_u128;
1792FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpsd_u128;
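/*
 * Illustrative usage sketch (not part of the build): the CMPPS/CMPPD family
 * packs both source registers into an IEMMEDIAF2XMMSRC before calling the
 * worker, roughly like this (hypothetical variables):
 *
 *      IEMMEDIAF2XMMSRC Src;
 *      Src.uSrc1 = *puDst;                     // first source = destination register
 *      Src.uSrc2 = uSrc2;                      // second source (xmm/mem)
 *      uint32_t fMxCsrOut = iemAImpl_cmpps_u128(fMxCsrIn, puDst, &Src, bImm8);
 */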
1793
1794FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpps_u128, iemAImpl_vcmpps_u128_fallback;
1795FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmppd_u128, iemAImpl_vcmppd_u128_fallback;
1796FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback;
1797FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback;
1798
1799FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vcmpps_u256, iemAImpl_vcmpps_u256_fallback;
1800FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vcmppd_u256, iemAImpl_vcmppd_u256_fallback;
1801
1802FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_roundss_u128;
1803FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_roundsd_u128;
1804
1805FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_dpps_u128, iemAImpl_dpps_u128_fallback;
1806FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_dppd_u128, iemAImpl_dppd_u128_fallback;
1807
1808
1809typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U128IMM8,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, uint8_t bEvil));
1810typedef FNIEMAIMPLMEDIAF2U128IMM8 *PFNIEMAIMPLMEDIAF2U128IMM8;
1811
1812
1813typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U256IMM8,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc, uint8_t bEvil));
1814typedef FNIEMAIMPLMEDIAF2U256IMM8 *PFNIEMAIMPLMEDIAF2U256IMM8;
1815
1816
1817FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_roundps_u128, iemAImpl_roundps_u128_fallback;
1818FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_roundpd_u128, iemAImpl_roundpd_u128_fallback;
1819
1820FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_vroundps_u128, iemAImpl_vroundps_u128_fallback;
1821FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_vroundpd_u128, iemAImpl_vroundpd_u128_fallback;
1822
1823FNIEMAIMPLMEDIAF2U256IMM8 iemAImpl_vroundps_u256, iemAImpl_vroundps_u256_fallback;
1824FNIEMAIMPLMEDIAF2U256IMM8 iemAImpl_vroundpd_u256, iemAImpl_vroundpd_u256_fallback;
1825
1826FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vroundss_u128, iemAImpl_vroundss_u128_fallback;
1827FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vroundsd_u128, iemAImpl_vroundsd_u128_fallback;
1828
1829FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vdpps_u128, iemAImpl_vdpps_u128_fallback;
1830FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vdppd_u128, iemAImpl_vdppd_u128_fallback;
1831
1832FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vdpps_u256, iemAImpl_vdpps_u256_fallback;
1833
1834
1835typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU64U128,(uint32_t fMxCsrIn, uint64_t *pu64Dst, PCX86XMMREG pSrc));
1836typedef FNIEMAIMPLMXCSRU64U128 *PFNIEMAIMPLMXCSRU64U128;
1837
1838FNIEMAIMPLMXCSRU64U128 iemAImpl_cvtpd2pi_u128;
1839FNIEMAIMPLMXCSRU64U128 iemAImpl_cvttpd2pi_u128;
1840
1841typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU128U64,(uint32_t fMxCsrIn, PX86XMMREG pDst, uint64_t u64Src));
1842typedef FNIEMAIMPLMXCSRU128U64 *PFNIEMAIMPLMXCSRU128U64;
1843
1844FNIEMAIMPLMXCSRU128U64 iemAImpl_cvtpi2ps_u128;
1845FNIEMAIMPLMXCSRU128U64 iemAImpl_cvtpi2pd_u128;
1846
1847typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU64U64,(uint32_t fMxCsrIn, uint64_t *pu64Dst, uint64_t u64Src));
1848typedef FNIEMAIMPLMXCSRU64U64 *PFNIEMAIMPLMXCSRU64U64;
1849
1850FNIEMAIMPLMXCSRU64U64 iemAImpl_cvtps2pi_u128;
1851FNIEMAIMPLMXCSRU64U64 iemAImpl_cvttps2pi_u128;
1852
1853/** @} */
1854
1855
1856/** @name Function tables.
1857 * @{
1858 */
1859
1860/**
1861 * Function table for a binary operator providing implementation based on
1862 * operand size.
1863 */
1864typedef struct IEMOPBINSIZES
1865{
1866 PFNIEMAIMPLBINU8 pfnNormalU8, pfnLockedU8;
1867 PFNIEMAIMPLBINU16 pfnNormalU16, pfnLockedU16;
1868 PFNIEMAIMPLBINU32 pfnNormalU32, pfnLockedU32;
1869 PFNIEMAIMPLBINU64 pfnNormalU64, pfnLockedU64;
1870} IEMOPBINSIZES;
1871/** Pointer to a binary operator function table. */
1872typedef IEMOPBINSIZES const *PCIEMOPBINSIZES;
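/*
 * Illustrative sketch (not part of the build): a binary operator table is
 * simply filled with the per-size workers and their locked variants, e.g.
 * for ADD (the actual table instances live in the implementation files, so
 * treat the table name here as hypothetical):
 *
 *      static const IEMOPBINSIZES s_iemAImplSketch_add =
 *      {
 *          iemAImpl_add_u8,  iemAImpl_add_u8_locked,
 *          iemAImpl_add_u16, iemAImpl_add_u16_locked,
 *          iemAImpl_add_u32, iemAImpl_add_u32_locked,
 *          iemAImpl_add_u64, iemAImpl_add_u64_locked
 *      };
 */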
1873
1874
1875/**
1876 * Function table for a unary operator providing implementation based on
1877 * operand size.
1878 */
1879typedef struct IEMOPUNARYSIZES
1880{
1881 PFNIEMAIMPLUNARYU8 pfnNormalU8, pfnLockedU8;
1882 PFNIEMAIMPLUNARYU16 pfnNormalU16, pfnLockedU16;
1883 PFNIEMAIMPLUNARYU32 pfnNormalU32, pfnLockedU32;
1884 PFNIEMAIMPLUNARYU64 pfnNormalU64, pfnLockedU64;
1885} IEMOPUNARYSIZES;
1886/** Pointer to a unary operator function table. */
1887typedef IEMOPUNARYSIZES const *PCIEMOPUNARYSIZES;
1888
1889
1890/**
1891 * Function table for a shift operator providing implementation based on
1892 * operand size.
1893 */
1894typedef struct IEMOPSHIFTSIZES
1895{
1896 PFNIEMAIMPLSHIFTU8 pfnNormalU8;
1897 PFNIEMAIMPLSHIFTU16 pfnNormalU16;
1898 PFNIEMAIMPLSHIFTU32 pfnNormalU32;
1899 PFNIEMAIMPLSHIFTU64 pfnNormalU64;
1900} IEMOPSHIFTSIZES;
1901/** Pointer to a shift operator function table. */
1902typedef IEMOPSHIFTSIZES const *PCIEMOPSHIFTSIZES;
1903
1904
1905/**
1906 * Function table for a multiplication or division operation.
1907 */
1908typedef struct IEMOPMULDIVSIZES
1909{
1910 PFNIEMAIMPLMULDIVU8 pfnU8;
1911 PFNIEMAIMPLMULDIVU16 pfnU16;
1912 PFNIEMAIMPLMULDIVU32 pfnU32;
1913 PFNIEMAIMPLMULDIVU64 pfnU64;
1914} IEMOPMULDIVSIZES;
1915/** Pointer to a multiplication or division operation function table. */
1916typedef IEMOPMULDIVSIZES const *PCIEMOPMULDIVSIZES;
1917
1918
1919/**
1920 * Function table for a double precision shift operator providing implementation
1921 * based on operand size.
1922 */
1923typedef struct IEMOPSHIFTDBLSIZES
1924{
1925 PFNIEMAIMPLSHIFTDBLU16 pfnNormalU16;
1926 PFNIEMAIMPLSHIFTDBLU32 pfnNormalU32;
1927 PFNIEMAIMPLSHIFTDBLU64 pfnNormalU64;
1928} IEMOPSHIFTDBLSIZES;
1929/** Pointer to a double precision shift function table. */
1930typedef IEMOPSHIFTDBLSIZES const *PCIEMOPSHIFTDBLSIZES;
1931
1932
1933/**
1934 * Function table for media instructions taking two full sized media source
1935 * registers and one full sized destination register (AVX).
1936 */
1937typedef struct IEMOPMEDIAF3
1938{
1939 PFNIEMAIMPLMEDIAF3U128 pfnU128;
1940 PFNIEMAIMPLMEDIAF3U256 pfnU256;
1941} IEMOPMEDIAF3;
1942/** Pointer to a media operation function table for 3 full sized ops (AVX). */
1943typedef IEMOPMEDIAF3 const *PCIEMOPMEDIAF3;
1944
1945/** @def IEMOPMEDIAF3_INIT_VARS_EX
1946 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
1947 * given functions as initializers. For use in AVX functions where a pair of
1948 * functions are only used once and the function table need not be public. */
1949#ifndef TST_IEM_CHECK_MC
1950# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
1951# define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
1952 static IEMOPMEDIAF3 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
1953 static IEMOPMEDIAF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
1954# else
1955# define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
1956 static IEMOPMEDIAF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
1957# endif
1958#else
1959# define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
1960#endif
1961/** @def IEMOPMEDIAF3_INIT_VARS
1962 * Generate AVX function tables for the @a a_InstrNm instruction.
1963 * @sa IEMOPMEDIAF3_INIT_VARS_EX */
1964#define IEMOPMEDIAF3_INIT_VARS(a_InstrNm) \
1965 IEMOPMEDIAF3_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
1966 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
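/* Usage sketch (hypothetical, for illustration only): an AVX instruction body would
 * typically instantiate the tables and pick one based on host support, roughly
 *
 *      IEMOPMEDIAF3_INIT_VARS(vaddps);
 *      return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx,
 *                            IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback));
 *
 * Here iemOpCommonAvxAvx_Vx_Hx_Wx is assumed from the VEX decoder and
 * IEM_SELECT_HOST_OR_FALLBACK from the generic IEM headers; the same pattern applies
 * to the other *_INIT_VARS macros below. */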
1967
1968
1969/**
1970 * Function table for media instructions taking one full sized media source
1971 * register and one full sized destination register (AVX).
1972 */
1973typedef struct IEMOPMEDIAF2
1974{
1975 PFNIEMAIMPLMEDIAF2U128 pfnU128;
1976 PFNIEMAIMPLMEDIAF2U256 pfnU256;
1977} IEMOPMEDIAF2;
1978/** Pointer to a media operation function table for 2 full sized ops (AVX). */
1979typedef IEMOPMEDIAF2 const *PCIEMOPMEDIAF2;
1980
1981/** @def IEMOPMEDIAF2_INIT_VARS_EX
1982 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
1983 * given functions as initializers. For use in AVX functions where a pair of
1984 * functions are only used once and the function table need not be public. */
1985#ifndef TST_IEM_CHECK_MC
1986# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
1987# define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
1988 static IEMOPMEDIAF2 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
1989 static IEMOPMEDIAF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
1990# else
1991# define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
1992 static IEMOPMEDIAF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
1993# endif
1994#else
1995# define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
1996#endif
1997/** @def IEMOPMEDIAF2_INIT_VARS
1998 * Generate AVX function tables for the @a a_InstrNm instruction.
1999 * @sa IEMOPMEDIAF2_INIT_VARS_EX */
2000#define IEMOPMEDIAF2_INIT_VARS(a_InstrNm) \
2001 IEMOPMEDIAF2_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2002 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2003
2004
2005/**
2006 * Function table for media instructions taking two full sized media source
2007 * registers and one full sized destination register, but no additional state
2008 * (AVX).
2009 */
2010typedef struct IEMOPMEDIAOPTF3
2011{
2012 PFNIEMAIMPLMEDIAOPTF3U128 pfnU128;
2013 PFNIEMAIMPLMEDIAOPTF3U256 pfnU256;
2014} IEMOPMEDIAOPTF3;
2015/** Pointer to a media operation function table for 3 full sized ops (AVX). */
2016typedef IEMOPMEDIAOPTF3 const *PCIEMOPMEDIAOPTF3;
2017
2018/** @def IEMOPMEDIAOPTF3_INIT_VARS_EX
2019 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2020 * given functions as initializers. For use in AVX functions where a pair of
2021 * functions are only used once and the function table need not be public. */
2022#ifndef TST_IEM_CHECK_MC
2023# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2024# define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2025 static IEMOPMEDIAOPTF3 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2026 static IEMOPMEDIAOPTF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2027# else
2028# define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2029 static IEMOPMEDIAOPTF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2030# endif
2031#else
2032# define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2033#endif
2034/** @def IEMOPMEDIAOPTF3_INIT_VARS
2035 * Generate AVX function tables for the @a a_InstrNm instruction.
2036 * @sa IEMOPMEDIAOPTF3_INIT_VARS_EX */
2037#define IEMOPMEDIAOPTF3_INIT_VARS(a_InstrNm) \
2038 IEMOPMEDIAOPTF3_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2039 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2040
2041/**
2042 * Function table for media instructions taking one full sized media source
2043 * register and one full sized destination register, but no additional state
2044 * (AVX).
2045 */
2046typedef struct IEMOPMEDIAOPTF2
2047{
2048 PFNIEMAIMPLMEDIAOPTF2U128 pfnU128;
2049 PFNIEMAIMPLMEDIAOPTF2U256 pfnU256;
2050} IEMOPMEDIAOPTF2;
2051/** Pointer to a media operation function table for 2 full sized ops (AVX). */
2052typedef IEMOPMEDIAOPTF2 const *PCIEMOPMEDIAOPTF2;
2053
2054/** @def IEMOPMEDIAOPTF2_INIT_VARS_EX
2055 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2056 * given functions as initializers. For use in AVX functions where a pair of
2057 * functions are only used once and the function table need not be public. */
2058#ifndef TST_IEM_CHECK_MC
2059# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2060# define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2061 static IEMOPMEDIAOPTF2 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2062 static IEMOPMEDIAOPTF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2063# else
2064# define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2065 static IEMOPMEDIAOPTF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2066# endif
2067#else
2068# define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2069#endif
2070/** @def IEMOPMEDIAOPTF2_INIT_VARS
2071 * Generate AVX function tables for the @a a_InstrNm instruction.
2072 * @sa IEMOPMEDIAOPTF2_INIT_VARS_EX */
2073#define IEMOPMEDIAOPTF2_INIT_VARS(a_InstrNm) \
2074 IEMOPMEDIAOPTF2_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2075 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2076
2077
2078/**
2079 * Function table for media instructions taking one full sized media source
2080 * register and one full sized destination register and an 8-bit immediate (AVX).
2081 */
2082typedef struct IEMOPMEDIAF2IMM8
2083{
2084 PFNIEMAIMPLMEDIAF2U128IMM8 pfnU128;
2085 PFNIEMAIMPLMEDIAF2U256IMM8 pfnU256;
2086} IEMOPMEDIAF2IMM8;
2087/** Pointer to a media operation function table for 2 full sized ops (AVX). */
2088typedef IEMOPMEDIAF2IMM8 const *PCIEMOPMEDIAF2IMM8;
2089
2090/** @def IEMOPMEDIAF2IMM8_INIT_VARS_EX
2091 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2092 * given functions as initializers. For use in AVX functions where a pair of
2093 * functions are only used once and the function table need not be public. */
2094#ifndef TST_IEM_CHECK_MC
2095# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2096# define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2097 static IEMOPMEDIAF2IMM8 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2098 static IEMOPMEDIAF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2099# else
2100# define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2101 static IEMOPMEDIAF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2102# endif
2103#else
2104# define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2105#endif
2106/** @def IEMOPMEDIAF2IMM8_INIT_VARS
2107 * Generate AVX function tables for the @a a_InstrNm instruction.
2108 * @sa IEMOPMEDIAF2IMM8_INIT_VARS_EX */
2109#define IEMOPMEDIAF2IMM8_INIT_VARS(a_InstrNm) \
2110 IEMOPMEDIAF2IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2111 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2112
2113
2114/**
2115 * Function table for media instructions taking one full sized media source
2116 * register and one full sized destination register and an 8-bit immediate, but no additional state
2117 * (AVX).
2118 */
2119typedef struct IEMOPMEDIAOPTF2IMM8
2120{
2121 PFNIEMAIMPLMEDIAOPTF2U128IMM8 pfnU128;
2122 PFNIEMAIMPLMEDIAOPTF2U256IMM8 pfnU256;
2123} IEMOPMEDIAOPTF2IMM8;
2124/** Pointer to a media operation function table for 2 full sized ops (AVX). */
2125typedef IEMOPMEDIAOPTF2IMM8 const *PCIEMOPMEDIAOPTF2IMM8;
2126
2127/** @def IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX
2128 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2129 * given functions as initializers. For use in AVX functions where a pair of
2130 * functions are only used once and the function table need not be public. */
2131#ifndef TST_IEM_CHECK_MC
2132# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2133# define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2134 static IEMOPMEDIAOPTF2IMM8 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2135 static IEMOPMEDIAOPTF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2136# else
2137# define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2138 static IEMOPMEDIAOPTF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2139# endif
2140#else
2141# define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2142#endif
2143/** @def IEMOPMEDIAOPTF2IMM8_INIT_VARS
2144 * Generate AVX function tables for the @a a_InstrNm instruction.
2145 * @sa IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX */
2146#define IEMOPMEDIAOPTF2IMM8_INIT_VARS(a_InstrNm) \
2147 IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u256),\
2148 RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u256_fallback))
2149
2150/**
2151 * Function table for media instructions taking two full sized media source
2152 * registers and one full sized destination register and an 8-bit immediate, but no additional state
2153 * (AVX).
2154 */
2155typedef struct IEMOPMEDIAOPTF3IMM8
2156{
2157 PFNIEMAIMPLMEDIAOPTF3U128IMM8 pfnU128;
2158 PFNIEMAIMPLMEDIAOPTF3U256IMM8 pfnU256;
2159} IEMOPMEDIAOPTF3IMM8;
2160/** Pointer to a media operation function table for 3 full sized ops (AVX). */
2161typedef IEMOPMEDIAOPTF3IMM8 const *PCIEMOPMEDIAOPTF3IMM8;
2162
2163/** @def IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX
2164 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2165 * given functions as initializers. For use in AVX functions where a pair of
2166 * functions are only used once and the function table need not be public. */
2167#ifndef TST_IEM_CHECK_MC
2168# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2169# define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2170 static IEMOPMEDIAOPTF3IMM8 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2171 static IEMOPMEDIAOPTF3IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2172# else
2173# define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2174 static IEMOPMEDIAOPTF3IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2175# endif
2176#else
2177# define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2178#endif
2179/** @def IEMOPMEDIAOPTF3IMM8_INIT_VARS
2180 * Generate AVX function tables for the @a a_InstrNm instruction.
2181 * @sa IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX */
2182#define IEMOPMEDIAOPTF3IMM8_INIT_VARS(a_InstrNm) \
2183 IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2184 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2185/** @} */
2186
2187
2188/**
2189 * Function table for blend type instructions taking three full sized media source
2190 * registers and one full sized destination register, but no additional state
2191 * (AVX).
2192 */
2193typedef struct IEMOPBLENDOP
2194{
2195 PFNIEMAIMPLAVXBLENDU128 pfnU128;
2196 PFNIEMAIMPLAVXBLENDU256 pfnU256;
2197} IEMOPBLENDOP;
2198/** Pointer to a media operation function table for 4 full sized ops (AVX). */
2199typedef IEMOPBLENDOP const *PCIEMOPBLENDOP;
2200
2201/** @def IEMOPBLENDOP_INIT_VARS_EX
2202 * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
2203 * given functions as initializers. For use in AVX functions where a pair of
2204 * functions are only used once and the function table need not be public. */
2205#ifndef TST_IEM_CHECK_MC
2206# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
2207# define IEMOPBLENDOP_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2208 static IEMOPBLENDOP const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
2209 static IEMOPBLENDOP const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2210# else
2211# define IEMOPBLENDOP_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
2212 static IEMOPBLENDOP const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
2213# endif
2214#else
2215# define IEMOPBLENDOP_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
2216#endif
2217/** @def IEMOPBLENDOP_INIT_VARS
2218 * Generate AVX function tables for the @a a_InstrNm instruction.
2219 * @sa IEMOPBLENDOP_INIT_VARS_EX */
2220#define IEMOPBLENDOP_INIT_VARS(a_InstrNm) \
2221 IEMOPBLENDOP_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
2222 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
2223
2224
2225/** @name SSE/AVX single/double precision floating point operations.
2226 * @{ */
2227typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
2228typedef FNIEMAIMPLFPSSEF2U128 *PFNIEMAIMPLFPSSEF2U128;
2229typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128R32,(uint32_t uMxCsrIn, PX86XMMREG Result, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2));
2230typedef FNIEMAIMPLFPSSEF2U128R32 *PFNIEMAIMPLFPSSEF2U128R32;
2231typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128R64,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2));
2232typedef FNIEMAIMPLFPSSEF2U128R64 *PFNIEMAIMPLFPSSEF2U128R64;
2233
2234typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
2235typedef FNIEMAIMPLFPAVXF3U128 *PFNIEMAIMPLFPAVXF3U128;
2236typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128R32,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2));
2237typedef FNIEMAIMPLFPAVXF3U128R32 *PFNIEMAIMPLFPAVXF3U128R32;
2238typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128R64,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2));
2239typedef FNIEMAIMPLFPAVXF3U128R64 *PFNIEMAIMPLFPAVXF3U128R64;
2240
2241typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U256,(uint32_t uMxCsrIn, PX86YMMREG pResult, PCX86YMMREG puSrc1, PCX86YMMREG puSrc2));
2242typedef FNIEMAIMPLFPAVXF3U256 *PFNIEMAIMPLFPAVXF3U256;
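/* Implementation shape (illustrative sketch only, not the actual code): workers of
 * these types take the current MXCSR as input and return an updated MXCSR with any
 * newly raised exception flags ORed in, roughly along the lines of
 *
 *      uint32_t iemAImpl_addps_u128(uint32_t uMxCsrIn, PX86XMMREG pResult,
 *                                   PCX86XMMREG puSrc1, PCX86XMMREG puSrc2)
 *      {
 *          // add the four packed singles of *puSrc1 and *puSrc2 into *pResult,
 *          // honouring the rounding and masking bits of uMxCsrIn ...
 *          return uMxCsrIn | fNewXcptFlags; // fNewXcptFlags is a hypothetical local
 *      }
 *
 * The real workers live in the IEMAllAImpl* assembly and C fallback files. */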
2243
2244FNIEMAIMPLFPSSEF2U128 iemAImpl_addps_u128;
2245FNIEMAIMPLFPSSEF2U128 iemAImpl_addpd_u128;
2246FNIEMAIMPLFPSSEF2U128 iemAImpl_mulps_u128;
2247FNIEMAIMPLFPSSEF2U128 iemAImpl_mulpd_u128;
2248FNIEMAIMPLFPSSEF2U128 iemAImpl_subps_u128;
2249FNIEMAIMPLFPSSEF2U128 iemAImpl_subpd_u128;
2250FNIEMAIMPLFPSSEF2U128 iemAImpl_minps_u128;
2251FNIEMAIMPLFPSSEF2U128 iemAImpl_minpd_u128;
2252FNIEMAIMPLFPSSEF2U128 iemAImpl_divps_u128;
2253FNIEMAIMPLFPSSEF2U128 iemAImpl_divpd_u128;
2254FNIEMAIMPLFPSSEF2U128 iemAImpl_maxps_u128;
2255FNIEMAIMPLFPSSEF2U128 iemAImpl_maxpd_u128;
2256FNIEMAIMPLFPSSEF2U128 iemAImpl_haddps_u128;
2257FNIEMAIMPLFPSSEF2U128 iemAImpl_haddpd_u128;
2258FNIEMAIMPLFPSSEF2U128 iemAImpl_hsubps_u128;
2259FNIEMAIMPLFPSSEF2U128 iemAImpl_hsubpd_u128;
2260FNIEMAIMPLFPSSEF2U128 iemAImpl_sqrtps_u128;
2261FNIEMAIMPLFPSSEF2U128 iemAImpl_rsqrtps_u128;
2262FNIEMAIMPLFPSSEF2U128 iemAImpl_sqrtpd_u128;
2263FNIEMAIMPLFPSSEF2U128 iemAImpl_rcpps_u128;
2264FNIEMAIMPLFPSSEF2U128 iemAImpl_addsubps_u128;
2265FNIEMAIMPLFPSSEF2U128 iemAImpl_addsubpd_u128;
2266
2267FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtpd2ps_u128;
2268IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_cvtps2pd_u128,(uint32_t uMxCsrIn, PX86XMMREG pResult, uint64_t const *pu64Src));
2269
2270FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtdq2ps_u128;
2271FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtps2dq_u128;
2272FNIEMAIMPLFPSSEF2U128 iemAImpl_cvttps2dq_u128;
2273FNIEMAIMPLFPSSEF2U128 iemAImpl_cvttpd2dq_u128;
2274FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtdq2pd_u128;
2275FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtpd2dq_u128;
2276
2277FNIEMAIMPLFPSSEF2U128R32 iemAImpl_addss_u128_r32;
2278FNIEMAIMPLFPSSEF2U128R64 iemAImpl_addsd_u128_r64;
2279FNIEMAIMPLFPSSEF2U128R32 iemAImpl_mulss_u128_r32;
2280FNIEMAIMPLFPSSEF2U128R64 iemAImpl_mulsd_u128_r64;
2281FNIEMAIMPLFPSSEF2U128R32 iemAImpl_subss_u128_r32;
2282FNIEMAIMPLFPSSEF2U128R64 iemAImpl_subsd_u128_r64;
2283FNIEMAIMPLFPSSEF2U128R32 iemAImpl_minss_u128_r32;
2284FNIEMAIMPLFPSSEF2U128R64 iemAImpl_minsd_u128_r64;
2285FNIEMAIMPLFPSSEF2U128R32 iemAImpl_divss_u128_r32;
2286FNIEMAIMPLFPSSEF2U128R64 iemAImpl_divsd_u128_r64;
2287FNIEMAIMPLFPSSEF2U128R32 iemAImpl_maxss_u128_r32;
2288FNIEMAIMPLFPSSEF2U128R64 iemAImpl_maxsd_u128_r64;
2289FNIEMAIMPLFPSSEF2U128R32 iemAImpl_cvtss2sd_u128_r32;
2290FNIEMAIMPLFPSSEF2U128R64 iemAImpl_cvtsd2ss_u128_r64;
2291FNIEMAIMPLFPSSEF2U128R32 iemAImpl_sqrtss_u128_r32;
2292FNIEMAIMPLFPSSEF2U128R64 iemAImpl_sqrtsd_u128_r64;
2293FNIEMAIMPLFPSSEF2U128R32 iemAImpl_rsqrtss_u128_r32;
2294FNIEMAIMPLFPSSEF2U128R32 iemAImpl_rcpss_u128_r32;
2295
2296FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddps_u128, iemAImpl_vaddps_u128_fallback;
2297FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddpd_u128, iemAImpl_vaddpd_u128_fallback;
2298FNIEMAIMPLMEDIAF3U128 iemAImpl_vmulps_u128, iemAImpl_vmulps_u128_fallback;
2299FNIEMAIMPLMEDIAF3U128 iemAImpl_vmulpd_u128, iemAImpl_vmulpd_u128_fallback;
2300FNIEMAIMPLMEDIAF3U128 iemAImpl_vsubps_u128, iemAImpl_vsubps_u128_fallback;
2301FNIEMAIMPLMEDIAF3U128 iemAImpl_vsubpd_u128, iemAImpl_vsubpd_u128_fallback;
2302FNIEMAIMPLMEDIAF3U128 iemAImpl_vminps_u128, iemAImpl_vminps_u128_fallback;
2303FNIEMAIMPLMEDIAF3U128 iemAImpl_vminpd_u128, iemAImpl_vminpd_u128_fallback;
2304FNIEMAIMPLMEDIAF3U128 iemAImpl_vdivps_u128, iemAImpl_vdivps_u128_fallback;
2305FNIEMAIMPLMEDIAF3U128 iemAImpl_vdivpd_u128, iemAImpl_vdivpd_u128_fallback;
2306FNIEMAIMPLMEDIAF3U128 iemAImpl_vmaxps_u128, iemAImpl_vmaxps_u128_fallback;
2307FNIEMAIMPLMEDIAF3U128 iemAImpl_vmaxpd_u128, iemAImpl_vmaxpd_u128_fallback;
2308FNIEMAIMPLMEDIAF3U128 iemAImpl_vhaddps_u128, iemAImpl_vhaddps_u128_fallback;
2309FNIEMAIMPLMEDIAF3U128 iemAImpl_vhaddpd_u128, iemAImpl_vhaddpd_u128_fallback;
2310FNIEMAIMPLMEDIAF3U128 iemAImpl_vhsubps_u128, iemAImpl_vhsubps_u128_fallback;
2311FNIEMAIMPLMEDIAF3U128 iemAImpl_vhsubpd_u128, iemAImpl_vhsubpd_u128_fallback;
2312FNIEMAIMPLMEDIAF2U128 iemAImpl_vsqrtps_u128, iemAImpl_vsqrtps_u128_fallback;
2313FNIEMAIMPLMEDIAF2U128 iemAImpl_vsqrtpd_u128, iemAImpl_vsqrtpd_u128_fallback;
2314FNIEMAIMPLMEDIAF2U128 iemAImpl_vrsqrtps_u128, iemAImpl_vrsqrtps_u128_fallback;
2315FNIEMAIMPLMEDIAF2U128 iemAImpl_vrcpps_u128, iemAImpl_vrcpps_u128_fallback;
2316FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddsubps_u128, iemAImpl_vaddsubps_u128_fallback;
2317FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddsubpd_u128, iemAImpl_vaddsubpd_u128_fallback;
2318FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvtdq2ps_u128, iemAImpl_vcvtdq2ps_u128_fallback;
2319FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvtps2dq_u128, iemAImpl_vcvtps2dq_u128_fallback;
2320FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvttps2dq_u128, iemAImpl_vcvttps2dq_u128_fallback;
2321IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2322IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2323IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2324IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2325IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2326IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
2327
2328
2329FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback;
2330FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback;
2331FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback;
2332FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback;
2333FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback;
2334FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback;
2335FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback;
2336FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback;
2337FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback;
2338FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback;
2339FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback;
2340FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback;
2341FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback;
2342FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback;
2343FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback;
2344FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vrcpss_u128_r32, iemAImpl_vrcpss_u128_r32_fallback;
2345FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vcvtss2sd_u128_r32, iemAImpl_vcvtss2sd_u128_r32_fallback;
2346FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vcvtsd2ss_u128_r64, iemAImpl_vcvtsd2ss_u128_r64_fallback;
2347
2348
2349FNIEMAIMPLFPAVXF3U256 iemAImpl_vaddps_u256, iemAImpl_vaddps_u256_fallback;
2350FNIEMAIMPLFPAVXF3U256 iemAImpl_vaddpd_u256, iemAImpl_vaddpd_u256_fallback;
2351FNIEMAIMPLFPAVXF3U256 iemAImpl_vmulps_u256, iemAImpl_vmulps_u256_fallback;
2352FNIEMAIMPLFPAVXF3U256 iemAImpl_vmulpd_u256, iemAImpl_vmulpd_u256_fallback;
2353FNIEMAIMPLFPAVXF3U256 iemAImpl_vsubps_u256, iemAImpl_vsubps_u256_fallback;
2354FNIEMAIMPLFPAVXF3U256 iemAImpl_vsubpd_u256, iemAImpl_vsubpd_u256_fallback;
2355FNIEMAIMPLFPAVXF3U256 iemAImpl_vminps_u256, iemAImpl_vminps_u256_fallback;
2356FNIEMAIMPLFPAVXF3U256 iemAImpl_vminpd_u256, iemAImpl_vminpd_u256_fallback;
2357FNIEMAIMPLFPAVXF3U256 iemAImpl_vdivps_u256, iemAImpl_vdivps_u256_fallback;
2358FNIEMAIMPLFPAVXF3U256 iemAImpl_vdivpd_u256, iemAImpl_vdivpd_u256_fallback;
2359FNIEMAIMPLFPAVXF3U256 iemAImpl_vmaxps_u256, iemAImpl_vmaxps_u256_fallback;
2360FNIEMAIMPLFPAVXF3U256 iemAImpl_vmaxpd_u256, iemAImpl_vmaxpd_u256_fallback;
2361FNIEMAIMPLFPAVXF3U256 iemAImpl_vhaddps_u256, iemAImpl_vhaddps_u256_fallback;
2362FNIEMAIMPLFPAVXF3U256 iemAImpl_vhaddpd_u256, iemAImpl_vhaddpd_u256_fallback;
2363FNIEMAIMPLFPAVXF3U256 iemAImpl_vhsubps_u256, iemAImpl_vhsubps_u256_fallback;
2364FNIEMAIMPLFPAVXF3U256 iemAImpl_vhsubpd_u256, iemAImpl_vhsubpd_u256_fallback;
2365FNIEMAIMPLMEDIAF3U256 iemAImpl_vaddsubps_u256, iemAImpl_vaddsubps_u256_fallback;
2366FNIEMAIMPLMEDIAF3U256 iemAImpl_vaddsubpd_u256, iemAImpl_vaddsubpd_u256_fallback;
2367FNIEMAIMPLMEDIAF2U256 iemAImpl_vsqrtps_u256, iemAImpl_vsqrtps_u256_fallback;
2368FNIEMAIMPLMEDIAF2U256 iemAImpl_vsqrtpd_u256, iemAImpl_vsqrtpd_u256_fallback;
2369FNIEMAIMPLMEDIAF2U256 iemAImpl_vrsqrtps_u256, iemAImpl_vrsqrtps_u256_fallback;
2370FNIEMAIMPLMEDIAF2U256 iemAImpl_vrcpps_u256, iemAImpl_vrcpps_u256_fallback;
2371FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvtdq2ps_u256, iemAImpl_vcvtdq2ps_u256_fallback;
2372FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvtps2dq_u256, iemAImpl_vcvtps2dq_u256_fallback;
2373FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvttps2dq_u256, iemAImpl_vcvttps2dq_u256_fallback;
2374IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2375IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2376IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2377IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2378IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2379IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
2380/** @} */
2381
2382
2383/** @name Misc Helpers
2384 * @{ */
2385
2386/** @def IEM_GET_INSTR_LEN
2387 * Gets the instruction length. */
2388#ifdef IEM_WITH_CODE_TLB
2389# define IEM_GET_INSTR_LEN(a_pVCpu) ((a_pVCpu)->iem.s.offInstrNextByte - (uint32_t)(int32_t)(a_pVCpu)->iem.s.offCurInstrStart)
2390#else
2391# define IEM_GET_INSTR_LEN(a_pVCpu) ((a_pVCpu)->iem.s.offOpcode)
2392#endif
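/* Note (assumption based on the fields used above): in the code TLB case
 * offCurInstrStart is a signed offset that can be negative when the current
 * instruction starts before the instruction buffer window, which appears to be why
 * it is sign-extended via the (uint32_t)(int32_t) cast before the subtraction. */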
2393
2394/**
2395 * Gets the CPU mode (from fExec) as a IEMMODE value.
2396 *
2397 * @returns IEMMODE
2398 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2399 */
2400#define IEM_GET_CPU_MODE(a_pVCpu) ((a_pVCpu)->iem.s.fExec & IEM_F_MODE_X86_CPUMODE_MASK)
2401
2402/**
2403 * Check if we're currently executing in real or virtual 8086 mode.
2404 *
2405 * @returns @c true if it is, @c false if not.
2406 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2407 */
2408#define IEM_IS_REAL_OR_V86_MODE(a_pVCpu) (( ((a_pVCpu)->iem.s.fExec ^ IEM_F_MODE_X86_PROT_MASK) \
2409 & (IEM_F_MODE_X86_V86_MASK | IEM_F_MODE_X86_PROT_MASK)) != 0)
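/* Note: the XOR inverts the protected-mode bit, so the expression is non-zero when
 * either the PROT bit is clear (real mode) or the V86 bit is set (virtual 8086
 * mode), covering both cases with a single AND and compare. */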
2410
2411/**
2412 * Check if we're currently executing in virtual 8086 mode.
2413 *
2414 * @returns @c true if it is, @c false if not.
2415 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2416 */
2417#define IEM_IS_V86_MODE(a_pVCpu) (((a_pVCpu)->iem.s.fExec & IEM_F_MODE_X86_V86_MASK) != 0)
2418
2419/**
2420 * Check if we're currently executing in long mode.
2421 *
2422 * @returns @c true if it is, @c false if not.
2423 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2424 */
2425#define IEM_IS_LONG_MODE(a_pVCpu) (CPUMIsGuestInLongModeEx(IEM_GET_CTX(a_pVCpu)))
2426
2427/**
2428 * Check if we're currently executing in a 16-bit code segment.
2429 *
2430 * @returns @c true if it is, @c false if not.
2431 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2432 */
2433#define IEM_IS_16BIT_CODE(a_pVCpu) (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_16BIT)
2434
2435/**
2436 * Check if we're currently executing in a 32-bit code segment.
2437 *
2438 * @returns @c true if it is, @c false if not.
2439 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2440 */
2441#define IEM_IS_32BIT_CODE(a_pVCpu) (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_32BIT)
2442
2443/**
2444 * Check if we're currently executing in a 64-bit code segment.
2445 *
2446 * @returns @c true if it is, @c false if not.
2447 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2448 */
2449#define IEM_IS_64BIT_CODE(a_pVCpu) (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_64BIT)
2450
2451/**
2452 * Check if we're currently executing in real mode.
2453 *
2454 * @returns @c true if it is, @c false if not.
2455 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2456 */
2457#define IEM_IS_REAL_MODE(a_pVCpu) (!((a_pVCpu)->iem.s.fExec & IEM_F_MODE_X86_PROT_MASK))
2458
2459/**
2460 * Gets the current protection level (CPL).
2461 *
2462 * @returns 0..3
2463 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2464 */
2465#define IEM_GET_CPL(a_pVCpu) (((a_pVCpu)->iem.s.fExec >> IEM_F_X86_CPL_SHIFT) & IEM_F_X86_CPL_SMASK)
2466
2467/**
2468 * Sets the current protection level (CPL).
2469 *
2470 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2471 */
2472#define IEM_SET_CPL(a_pVCpu, a_uCpl) \
2473 do { (a_pVCpu)->iem.s.fExec = ((a_pVCpu)->iem.s.fExec & ~IEM_F_X86_CPL_MASK) | ((a_uCpl) << IEM_F_X86_CPL_SHIFT); } while (0)
2474
2475/**
2476 * Returns a (const) pointer to the CPUMFEATURES for the guest CPU.
2477 * @returns PCCPUMFEATURES
2478 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2479 */
2480#define IEM_GET_GUEST_CPU_FEATURES(a_pVCpu) (&((a_pVCpu)->CTX_SUFF(pVM)->cpum.ro.GuestFeatures))
2481
2482/**
2483 * Returns a (const) pointer to the CPUMFEATURES for the host CPU.
2484 * @returns PCCPUMFEATURES
2485 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2486 */
2487#define IEM_GET_HOST_CPU_FEATURES(a_pVCpu) (&g_CpumHostFeatures.s)
2488
2489/**
2490 * Evaluates to true if we're presenting an Intel CPU to the guest.
2491 */
2492#define IEM_IS_GUEST_CPU_INTEL(a_pVCpu) ( (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL )
2493
2494/**
2495 * Evaluates to true if we're presenting an AMD CPU to the guest.
2496 */
2497#define IEM_IS_GUEST_CPU_AMD(a_pVCpu) ( (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_AMD || (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_HYGON )
2498
2499/**
2500 * Check if the address is canonical.
2501 */
2502#define IEM_IS_CANONICAL(a_u64Addr) X86_IS_CANONICAL(a_u64Addr)
2503
2504/** Checks if the ModR/M byte is in register mode or not. */
2505#define IEM_IS_MODRM_REG_MODE(a_bRm) ( ((a_bRm) & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
2506/** Checks if the ModR/M byte is in memory mode or not. */
2507#define IEM_IS_MODRM_MEM_MODE(a_bRm) ( ((a_bRm) & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT) )
2508
2509/**
2510 * Gets the register (reg) part of a ModR/M encoding, with REX.R added in.
2511 *
2512 * For use during decoding.
2513 */
2514#define IEM_GET_MODRM_REG(a_pVCpu, a_bRm) ( (((a_bRm) >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | (a_pVCpu)->iem.s.uRexReg )
2515/**
2516 * Gets the r/m part of a ModR/M encoding as a register index, with REX.B added in.
2517 *
2518 * For use during decoding.
2519 */
2520#define IEM_GET_MODRM_RM(a_pVCpu, a_bRm) ( ((a_bRm) & X86_MODRM_RM_MASK) | (a_pVCpu)->iem.s.uRexB )
2521
2522/**
2523 * Gets the register (reg) part of a ModR/M encoding, without REX.R.
2524 *
2525 * For use during decoding.
2526 */
2527#define IEM_GET_MODRM_REG_8(a_bRm) ( (((a_bRm) >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) )
2528/**
2529 * Gets the r/m part of a ModR/M encoding as a register index, without REX.B.
2530 *
2531 * For use during decoding.
2532 */
2533#define IEM_GET_MODRM_RM_8(a_bRm) ( ((a_bRm) & X86_MODRM_RM_MASK) )
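/* Worked example (for illustration): with bRm = 0x5A (mod=01, reg=011, rm=010),
 * IEM_GET_MODRM_REG_8 yields 3 and IEM_GET_MODRM_RM_8 yields 2; if a REX.R prefix
 * was recorded as uRexReg = 8, IEM_GET_MODRM_REG instead yields 3 | 8 = 11. */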
2534
2535/**
2536 * Gets the register (reg) part of a ModR/M encoding as an extended 8-bit
2537 * register index, with REX.R added in.
2538 *
2539 * For use during decoding.
2540 *
2541 * @see iemGRegRefU8Ex, iemGRegFetchU8Ex, iemGRegStoreU8Ex
2542 */
2543#define IEM_GET_MODRM_REG_EX8(a_pVCpu, a_bRm) \
2544 ( (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX) \
2545 || !((a_bRm) & (4 << X86_MODRM_REG_SHIFT)) /* IEM_GET_MODRM_REG(pVCpu, a_bRm) < 4 */ \
2546 ? IEM_GET_MODRM_REG(pVCpu, a_bRm) : (((a_bRm) >> X86_MODRM_REG_SHIFT) & 3) | 16)
2547/**
2548 * Gets the r/m part of a ModR/M encoding as an extended 8-bit register index,
2549 * with REX.B added in.
2550 *
2551 * For use during decoding.
2552 *
2553 * @see iemGRegRefU8Ex, iemGRegFetchU8Ex, iemGRegStoreU8Ex
2554 */
2555#define IEM_GET_MODRM_RM_EX8(a_pVCpu, a_bRm) \
2556 ( (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX) \
2557 || !((a_bRm) & 4) /* IEM_GET_MODRM_RM(pVCpu, a_bRm) < 4 */ \
2558 ? IEM_GET_MODRM_RM(pVCpu, a_bRm) : ((a_bRm) & 3) | 16)
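/* Reading of the | 16 above (assumption from the encoding): without a REX prefix,
 * register indexes 4-7 select the legacy high byte registers AH/CH/DH/BH, so they
 * are remapped to indexes 16-19, which the iemGRegRefU8Ex/iemGRegFetchU8Ex/
 * iemGRegStoreU8Ex helpers are expected to translate to the high byte of the first
 * four GPRs. */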
2559
2560/**
2561 * Combines the prefix REX and ModR/M byte for passing to
2562 * iemOpHlpCalcRmEffAddrThreadedAddr64().
2563 *
2564 * @returns The ModRM byte but with bit 3 set to REX.B and bit 4 to REX.X.
2565 * The two bits are part of the REG sub-field, which isn't needed in
2566 * iemOpHlpCalcRmEffAddrThreadedAddr64().
2567 *
2568 * For use during decoding/recompiling.
2569 */
2570#define IEM_GET_MODRM_EX(a_pVCpu, a_bRm) \
2571 ( ((a_bRm) & ~X86_MODRM_REG_MASK) \
2572 | (uint8_t)( (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X)) >> (25 - 3) ) )
2573AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25));
2574AssertCompile(IEM_OP_PRF_REX_X == RT_BIT_32(26));
2575
2576/**
2577 * Gets the effective VEX.VVVV value.
2578 *
2579 * The 4th bit is ignored if not 64-bit code.
2580 * @returns effective V-register value.
2581 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2582 */
2583#define IEM_GET_EFFECTIVE_VVVV(a_pVCpu) \
2584 (IEM_IS_64BIT_CODE(a_pVCpu) ? (a_pVCpu)->iem.s.uVex3rdReg : (a_pVCpu)->iem.s.uVex3rdReg & 7)
2585
2586
2587/**
2588 * Gets the register (reg) part of the special 4th register byte used by
2589 * vblendvps and vblendvpd.
2590 *
2591 * For use during decoding.
2592 */
2593#define IEM_GET_IMM8_REG(a_pVCpu, a_bRegImm8) \
2594 (IEM_IS_64BIT_CODE(a_pVCpu) ? (a_bRegImm8) >> 4 : ((a_bRegImm8) >> 4) & 7)
2595
2596
2597/**
2598 * Checks if we're executing inside an AMD-V or VT-x guest.
2599 */
2600#if defined(VBOX_WITH_NESTED_HWVIRT_VMX) || defined(VBOX_WITH_NESTED_HWVIRT_SVM)
2601# define IEM_IS_IN_GUEST(a_pVCpu) RT_BOOL((a_pVCpu)->iem.s.fExec & IEM_F_X86_CTX_IN_GUEST)
2602#else
2603# define IEM_IS_IN_GUEST(a_pVCpu) false
2604#endif
2605
2606
2607#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
2608
2609/**
2610 * Check if the guest has entered VMX root operation.
2611 */
2612# define IEM_VMX_IS_ROOT_MODE(a_pVCpu) (CPUMIsGuestInVmxRootMode(IEM_GET_CTX(a_pVCpu)))
2613
2614/**
2615 * Check if the guest has entered VMX non-root operation.
2616 */
2617# define IEM_VMX_IS_NON_ROOT_MODE(a_pVCpu) ( ((a_pVCpu)->iem.s.fExec & (IEM_F_X86_CTX_VMX | IEM_F_X86_CTX_IN_GUEST)) \
2618 == (IEM_F_X86_CTX_VMX | IEM_F_X86_CTX_IN_GUEST) )
2619
2620/**
2621 * Check if the nested-guest has the given Pin-based VM-execution control set.
2622 */
2623# define IEM_VMX_IS_PINCTLS_SET(a_pVCpu, a_PinCtl) (CPUMIsGuestVmxPinCtlsSet(IEM_GET_CTX(a_pVCpu), (a_PinCtl)))
2624
2625/**
2626 * Check if the nested-guest has the given Processor-based VM-execution control set.
2627 */
2628# define IEM_VMX_IS_PROCCTLS_SET(a_pVCpu, a_ProcCtl) (CPUMIsGuestVmxProcCtlsSet(IEM_GET_CTX(a_pVCpu), (a_ProcCtl)))
2629
2630/**
2631 * Check if the nested-guest has the given Secondary Processor-based VM-execution
2632 * control set.
2633 */
2634# define IEM_VMX_IS_PROCCTLS2_SET(a_pVCpu, a_ProcCtl2) (CPUMIsGuestVmxProcCtls2Set(IEM_GET_CTX(a_pVCpu), (a_ProcCtl2)))
2635
2636/** Gets the guest-physical address of the shadow VMCS for the given VCPU. */
2637# define IEM_VMX_GET_SHADOW_VMCS(a_pVCpu) ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysShadowVmcs)
2638
2639/** Whether a shadow VMCS is present for the given VCPU. */
2640# define IEM_VMX_HAS_SHADOW_VMCS(a_pVCpu) RT_BOOL(IEM_VMX_GET_SHADOW_VMCS(a_pVCpu) != NIL_RTGCPHYS)
2641
2642/** Gets the VMXON region pointer. */
2643# define IEM_VMX_GET_VMXON_PTR(a_pVCpu) ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmxon)
2644
2645/** Gets the guest-physical address of the current VMCS for the given VCPU. */
2646# define IEM_VMX_GET_CURRENT_VMCS(a_pVCpu) ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs)
2647
2648/** Whether a current VMCS is present for the given VCPU. */
2649# define IEM_VMX_HAS_CURRENT_VMCS(a_pVCpu) RT_BOOL(IEM_VMX_GET_CURRENT_VMCS(a_pVCpu) != NIL_RTGCPHYS)
2650
2651/** Assigns the guest-physical address of the current VMCS for the given VCPU. */
2652# define IEM_VMX_SET_CURRENT_VMCS(a_pVCpu, a_GCPhysVmcs) \
2653 do \
2654 { \
2655 Assert((a_GCPhysVmcs) != NIL_RTGCPHYS); \
2656 (a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs = (a_GCPhysVmcs); \
2657 } while (0)
2658
2659/** Clears any current VMCS for the given VCPU. */
2660# define IEM_VMX_CLEAR_CURRENT_VMCS(a_pVCpu) \
2661 do \
2662 { \
2663 (a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs = NIL_RTGCPHYS; \
2664 } while (0)
2665
2666/**
2667 * Invokes the VMX VM-exit handler for an instruction intercept.
2668 */
2669# define IEM_VMX_VMEXIT_INSTR_RET(a_pVCpu, a_uExitReason, a_cbInstr) \
2670 do { return iemVmxVmexitInstr((a_pVCpu), (a_uExitReason), (a_cbInstr)); } while (0)
2671
2672/**
2673 * Invokes the VMX VM-exit handler for an instruction intercept where the
2674 * instruction provides additional VM-exit information.
2675 */
2676# define IEM_VMX_VMEXIT_INSTR_NEEDS_INFO_RET(a_pVCpu, a_uExitReason, a_uInstrId, a_cbInstr) \
2677 do { return iemVmxVmexitInstrNeedsInfo((a_pVCpu), (a_uExitReason), (a_uInstrId), (a_cbInstr)); } while (0)
2678
2679/**
2680 * Invokes the VMX VM-exit handler for a task switch.
2681 */
2682# define IEM_VMX_VMEXIT_TASK_SWITCH_RET(a_pVCpu, a_enmTaskSwitch, a_SelNewTss, a_cbInstr) \
2683 do { return iemVmxVmexitTaskSwitch((a_pVCpu), (a_enmTaskSwitch), (a_SelNewTss), (a_cbInstr)); } while (0)
2684
2685/**
2686 * Invokes the VMX VM-exit handler for MWAIT.
2687 */
2688# define IEM_VMX_VMEXIT_MWAIT_RET(a_pVCpu, a_fMonitorArmed, a_cbInstr) \
2689 do { return iemVmxVmexitInstrMwait((a_pVCpu), (a_fMonitorArmed), (a_cbInstr)); } while (0)
2690
2691/**
2692 * Invokes the VMX VM-exit handler for EPT faults.
2693 */
2694# define IEM_VMX_VMEXIT_EPT_RET(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr) \
2695 do { return iemVmxVmexitEpt(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr); } while (0)
2696
2697/**
2698 * Invokes the VMX VM-exit handler for a triple fault.
2699 */
2700# define IEM_VMX_VMEXIT_TRIPLE_FAULT_RET(a_pVCpu, a_uExitReason, a_uExitQual) \
2701 do { return iemVmxVmexit((a_pVCpu), (a_uExitReason), (a_uExitQual)); } while (0)
2702
2703#else
2704# define IEM_VMX_IS_ROOT_MODE(a_pVCpu) (false)
2705# define IEM_VMX_IS_NON_ROOT_MODE(a_pVCpu) (false)
2706# define IEM_VMX_IS_PINCTLS_SET(a_pVCpu, a_cbInstr) (false)
2707# define IEM_VMX_IS_PROCCTLS_SET(a_pVCpu, a_cbInstr) (false)
2708# define IEM_VMX_IS_PROCCTLS2_SET(a_pVCpu, a_cbInstr) (false)
2709# define IEM_VMX_VMEXIT_INSTR_RET(a_pVCpu, a_uExitReason, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
2710# define IEM_VMX_VMEXIT_INSTR_NEEDS_INFO_RET(a_pVCpu, a_uExitReason, a_uInstrId, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
2711# define IEM_VMX_VMEXIT_TASK_SWITCH_RET(a_pVCpu, a_enmTaskSwitch, a_SelNewTss, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
2712# define IEM_VMX_VMEXIT_MWAIT_RET(a_pVCpu, a_fMonitorArmed, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
2713# define IEM_VMX_VMEXIT_EPT_RET(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
2714# define IEM_VMX_VMEXIT_TRIPLE_FAULT_RET(a_pVCpu, a_uExitReason, a_uExitQual) do { return VERR_VMX_IPE_1; } while (0)
2715
2716#endif
2717
2718#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
2719/**
2720 * Checks if we're executing a guest using AMD-V.
2721 */
2722# define IEM_SVM_IS_IN_GUEST(a_pVCpu) ( (a_pVCpu->iem.s.fExec & (IEM_F_X86_CTX_SVM | IEM_F_X86_CTX_IN_GUEST)) \
2723 == (IEM_F_X86_CTX_SVM | IEM_F_X86_CTX_IN_GUEST))
2724/**
2725 * Check if an SVM control/instruction intercept is set.
2726 */
2727# define IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept) \
2728 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmCtrlInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_Intercept)))
2729
2730/**
2731 * Check if an SVM read CRx intercept is set.
2732 */
2733# define IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, a_uCr) \
2734 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmReadCRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uCr)))
2735
2736/**
2737 * Check if an SVM write CRx intercept is set.
2738 */
2739# define IEM_SVM_IS_WRITE_CR_INTERCEPT_SET(a_pVCpu, a_uCr) \
2740 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmWriteCRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uCr)))
2741
2742/**
2743 * Check if an SVM read DRx intercept is set.
2744 */
2745# define IEM_SVM_IS_READ_DR_INTERCEPT_SET(a_pVCpu, a_uDr) \
2746 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmReadDRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uDr)))
2747
2748/**
2749 * Check if an SVM write DRx intercept is set.
2750 */
2751# define IEM_SVM_IS_WRITE_DR_INTERCEPT_SET(a_pVCpu, a_uDr) \
2752 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmWriteDRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uDr)))
2753
2754/**
2755 * Check if an SVM exception intercept is set.
2756 */
2757# define IEM_SVM_IS_XCPT_INTERCEPT_SET(a_pVCpu, a_uVector) \
2758 (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmXcptInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uVector)))
2759
2760/**
2761 * Invokes the SVM \#VMEXIT handler for the nested-guest.
2762 */
2763# define IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2) \
2764 do { return iemSvmVmexit((a_pVCpu), (a_uExitCode), (a_uExitInfo1), (a_uExitInfo2)); } while (0)
2765
2766/**
2767 * Invokes the 'MOV CRx' SVM \#VMEXIT handler after constructing the
2768 * corresponding decode assist information.
2769 */
2770# define IEM_SVM_CRX_VMEXIT_RET(a_pVCpu, a_uExitCode, a_enmAccessCrX, a_iGReg) \
2771 do \
2772 { \
2773 uint64_t uExitInfo1; \
2774 if ( IEM_GET_GUEST_CPU_FEATURES(a_pVCpu)->fSvmDecodeAssists \
2775 && (a_enmAccessCrX) == IEMACCESSCRX_MOV_CRX) \
2776 uExitInfo1 = SVM_EXIT1_MOV_CRX_MASK | ((a_iGReg) & 7); \
2777 else \
2778 uExitInfo1 = 0; \
2779 IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, uExitInfo1, 0); \
2780 } while (0)
2781
2782/** Checks and handles an SVM nested-guest instruction intercept and updates
2783 * the NRIP if needed.
2784 */
2785# define IEM_SVM_CHECK_INSTR_INTERCEPT(a_pVCpu, a_Intercept, a_uExitCode, a_uExitInfo1, a_uExitInfo2, a_cbInstr) \
2786 do \
2787 { \
2788 if (IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept)) \
2789 { \
2790 IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr); \
2791 IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2); \
2792 } \
2793 } while (0)
2794
2795/** Checks and handles SVM nested-guest CR0 read intercept. */
2796# define IEM_SVM_CHECK_READ_CR0_INTERCEPT(a_pVCpu, a_uExitInfo1, a_uExitInfo2, a_cbInstr) \
2797 do \
2798 { \
2799 if (!IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, 0)) \
2800 { /* probably likely */ } \
2801 else \
2802 { \
2803 IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr); \
2804 IEM_SVM_VMEXIT_RET(a_pVCpu, SVM_EXIT_READ_CR0, a_uExitInfo1, a_uExitInfo2); \
2805 } \
2806 } while (0)
2807
2808/**
2809 * Updates the NextRIP (NRIP) field in the nested-guest VMCB.
2810 */
2811# define IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr) \
2812 do { \
2813 if (IEM_GET_GUEST_CPU_FEATURES(a_pVCpu)->fSvmNextRipSave) \
2814 CPUMGuestSvmUpdateNRip(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_cbInstr)); \
2815 } while (0)
2816
2817#else
2818# define IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept) (false)
2819# define IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, a_uCr) (false)
2820# define IEM_SVM_IS_WRITE_CR_INTERCEPT_SET(a_pVCpu, a_uCr) (false)
2821# define IEM_SVM_IS_READ_DR_INTERCEPT_SET(a_pVCpu, a_uDr) (false)
2822# define IEM_SVM_IS_WRITE_DR_INTERCEPT_SET(a_pVCpu, a_uDr) (false)
2823# define IEM_SVM_IS_XCPT_INTERCEPT_SET(a_pVCpu, a_uVector) (false)
2824# define IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2) do { return VERR_SVM_IPE_1; } while (0)
2825# define IEM_SVM_CRX_VMEXIT_RET(a_pVCpu, a_uExitCode, a_enmAccessCrX, a_iGReg) do { return VERR_SVM_IPE_1; } while (0)
2826# define IEM_SVM_CHECK_INSTR_INTERCEPT(a_pVCpu, a_Intercept, a_uExitCode, \
2827 a_uExitInfo1, a_uExitInfo2, a_cbInstr) do { } while (0)
2828# define IEM_SVM_CHECK_READ_CR0_INTERCEPT(a_pVCpu, a_uExitInfo1, a_uExitInfo2, a_cbInstr) do { } while (0)
2829# define IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr) do { } while (0)
2830
2831#endif
2832
2833/** @} */
2834
2835
2836/**
2837 * Selector descriptor table entry as fetched by iemMemFetchSelDesc.
2838 */
2839typedef union IEMSELDESC
2840{
2841 /** The legacy view. */
2842 X86DESC Legacy;
2843 /** The long mode view. */
2844 X86DESC64 Long;
2845} IEMSELDESC;
2846/** Pointer to a selector descriptor table entry. */
2847typedef IEMSELDESC *PIEMSELDESC;
2848
2849/** @name Raising Exceptions.
2850 * @{ */
2851VBOXSTRICTRC iemTaskSwitch(PVMCPUCC pVCpu, IEMTASKSWITCH enmTaskSwitch, uint32_t uNextEip, uint32_t fFlags,
2852 uint16_t uErr, uint64_t uCr2, RTSEL SelTSS, PIEMSELDESC pNewDescTSS) RT_NOEXCEPT;
2853
2854VBOXSTRICTRC iemRaiseXcptOrInt(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector, uint32_t fFlags,
2855 uint16_t uErr, uint64_t uCr2) RT_NOEXCEPT;
2856DECL_NO_RETURN(void) iemRaiseXcptOrIntJmp(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector,
2857 uint32_t fFlags, uint16_t uErr, uint64_t uCr2) IEM_NOEXCEPT_MAY_LONGJMP;
2858VBOXSTRICTRC iemRaiseDivideError(PVMCPUCC pVCpu) RT_NOEXCEPT;
2859DECL_NO_RETURN(void) iemRaiseDivideErrorJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2860VBOXSTRICTRC iemRaiseDebugException(PVMCPUCC pVCpu) RT_NOEXCEPT;
2861VBOXSTRICTRC iemRaiseBoundRangeExceeded(PVMCPUCC pVCpu) RT_NOEXCEPT;
2862VBOXSTRICTRC iemRaiseUndefinedOpcode(PVMCPUCC pVCpu) RT_NOEXCEPT;
2863DECL_NO_RETURN(void) iemRaiseUndefinedOpcodeJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2864VBOXSTRICTRC iemRaiseDeviceNotAvailable(PVMCPUCC pVCpu) RT_NOEXCEPT;
2865DECL_NO_RETURN(void) iemRaiseDeviceNotAvailableJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2866VBOXSTRICTRC iemRaiseTaskSwitchFaultWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
2867VBOXSTRICTRC iemRaiseTaskSwitchFaultCurrentTSS(PVMCPUCC pVCpu) RT_NOEXCEPT;
2868VBOXSTRICTRC iemRaiseTaskSwitchFault0(PVMCPUCC pVCpu) RT_NOEXCEPT;
2869VBOXSTRICTRC iemRaiseTaskSwitchFaultBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
2870/*VBOXSTRICTRC iemRaiseSelectorNotPresent(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;*/
2871VBOXSTRICTRC iemRaiseSelectorNotPresentWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
2872VBOXSTRICTRC iemRaiseSelectorNotPresentBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
2873VBOXSTRICTRC iemRaiseStackSelectorNotPresentBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
2874VBOXSTRICTRC iemRaiseStackSelectorNotPresentWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
2875VBOXSTRICTRC iemRaiseGeneralProtectionFault(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
2876VBOXSTRICTRC iemRaiseGeneralProtectionFault0(PVMCPUCC pVCpu) RT_NOEXCEPT;
2877DECL_NO_RETURN(void) iemRaiseGeneralProtectionFault0Jmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2878VBOXSTRICTRC iemRaiseGeneralProtectionFaultBySelector(PVMCPUCC pVCpu, RTSEL Sel) RT_NOEXCEPT;
2879VBOXSTRICTRC iemRaiseNotCanonical(PVMCPUCC pVCpu) RT_NOEXCEPT;
2880VBOXSTRICTRC iemRaiseSelectorBounds(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;
2881DECL_NO_RETURN(void) iemRaiseSelectorBoundsJmp(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) IEM_NOEXCEPT_MAY_LONGJMP;
2882VBOXSTRICTRC iemRaiseSelectorBoundsBySelector(PVMCPUCC pVCpu, RTSEL Sel) RT_NOEXCEPT;
2883DECL_NO_RETURN(void) iemRaiseSelectorBoundsBySelectorJmp(PVMCPUCC pVCpu, RTSEL Sel) IEM_NOEXCEPT_MAY_LONGJMP;
2884VBOXSTRICTRC iemRaiseSelectorInvalidAccess(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;
2885DECL_NO_RETURN(void) iemRaiseSelectorInvalidAccessJmp(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) IEM_NOEXCEPT_MAY_LONGJMP;
2886VBOXSTRICTRC iemRaisePageFault(PVMCPUCC pVCpu, RTGCPTR GCPtrWhere, uint32_t cbAccess, uint32_t fAccess, int rc) RT_NOEXCEPT;
2887DECL_NO_RETURN(void) iemRaisePageFaultJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrWhere, uint32_t cbAccess, uint32_t fAccess, int rc) IEM_NOEXCEPT_MAY_LONGJMP;
2888VBOXSTRICTRC iemRaiseMathFault(PVMCPUCC pVCpu) RT_NOEXCEPT;
2889DECL_NO_RETURN(void) iemRaiseMathFaultJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2890VBOXSTRICTRC iemRaiseAlignmentCheckException(PVMCPUCC pVCpu) RT_NOEXCEPT;
2891DECL_NO_RETURN(void) iemRaiseAlignmentCheckExceptionJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2892VBOXSTRICTRC iemRaiseSimdFpException(PVMCPUCC pVCpu) RT_NOEXCEPT;
2893DECL_NO_RETURN(void) iemRaiseSimdFpExceptionJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
2894
2895void iemLogSyscallRealModeInt(PVMCPUCC pVCpu, uint8_t u8Vector, uint8_t cbInstr);
2896void iemLogSyscallProtModeInt(PVMCPUCC pVCpu, uint8_t u8Vector, uint8_t cbInstr);
2897
2898IEM_CIMPL_PROTO_0(iemCImplRaiseDivideError);
2899IEM_CIMPL_PROTO_0(iemCImplRaiseInvalidLockPrefix);
2900IEM_CIMPL_PROTO_0(iemCImplRaiseInvalidOpcode);
2901
2902/**
2903 * Macro for calling iemCImplRaiseDivideError().
2904 *
2905 * This is for things that will _always_ decode to an \#DE, taking the
2906 * recompiler into consideration and everything.
2907 *
2908 * @return Strict VBox status code.
2909 */
2910#define IEMOP_RAISE_DIVIDE_ERROR_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseDivideError)
2911
2912/**
2913 * Macro for calling iemCImplRaiseInvalidLockPrefix().
2914 *
2915 * This is for things that will _always_ decode to an \#UD, taking the
2916 * recompiler into consideration and everything.
2917 *
2918 * @return Strict VBox status code.
2919 */
2920#define IEMOP_RAISE_INVALID_LOCK_PREFIX_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidLockPrefix)
2921
2922/**
2923 * Macro for calling iemCImplRaiseInvalidOpcode() for decode/static \#UDs.
2924 *
2925 * This is for things that will _always_ decode to an \#UD, taking the
2926 * recompiler into consideration and everything.
2927 *
2928 * @return Strict VBox status code.
2929 */
2930#define IEMOP_RAISE_INVALID_OPCODE_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidOpcode)
2931
2932/**
2933 * Macro for calling iemCImplRaiseInvalidOpcode() for runtime-style \#UDs.
2934 *
2935 * Using this macro means you've got _buggy_ _code_ and are doing things that
2936 * belong exclusively in IEMAllCImpl.cpp during decoding.
2937 *
2938 * @return Strict VBox status code.
2939 * @see IEMOP_RAISE_INVALID_OPCODE_RET
2940 */
2941#define IEMOP_RAISE_INVALID_OPCODE_RUNTIME_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidOpcode)
2942
2943/** @} */
2944
2945/** @name Register Access.
2946 * @{ */
2947VBOXSTRICTRC iemRegRipRelativeJumpS8AndFinishClearingRF(PVMCPUCC pVCpu, uint8_t cbInstr, int8_t offNextInstr,
2948 IEMMODE enmEffOpSize) RT_NOEXCEPT;
2949VBOXSTRICTRC iemRegRipRelativeJumpS16AndFinishClearingRF(PVMCPUCC pVCpu, uint8_t cbInstr, int16_t offNextInstr) RT_NOEXCEPT;
2950VBOXSTRICTRC iemRegRipRelativeJumpS32AndFinishClearingRF(PVMCPUCC pVCpu, uint8_t cbInstr, int32_t offNextInstr,
2951 IEMMODE enmEffOpSize) RT_NOEXCEPT;
2952/** @} */
2953
2954/** @name FPU access and helpers.
2955 * @{ */
2956void iemFpuPushResult(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint16_t uFpuOpcode) RT_NOEXCEPT;
2957void iemFpuPushResultWithMemOp(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2958void iemFpuPushResultTwo(PVMCPUCC pVCpu, PIEMFPURESULTTWO pResult, uint16_t uFpuOpcode) RT_NOEXCEPT;
2959void iemFpuStoreResult(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
2960void iemFpuStoreResultThenPop(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
2961void iemFpuStoreResultWithMemOp(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg,
2962 uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2963void iemFpuStoreResultWithMemOpThenPop(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg,
2964 uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2965void iemFpuUpdateOpcodeAndIp(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
2966void iemFpuUpdateFSW(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
2967void iemFpuUpdateFSWThenPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
2968void iemFpuUpdateFSWWithMemOp(PVMCPUCC pVCpu, uint16_t u16FSW, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2969void iemFpuUpdateFSWThenPopPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
2970void iemFpuUpdateFSWWithMemOpThenPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2971void iemFpuStackUnderflow(PVMCPUCC pVCpu, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
2972void iemFpuStackUnderflowWithMemOp(PVMCPUCC pVCpu, uint8_t iStReg, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2973void iemFpuStackUnderflowThenPop(PVMCPUCC pVCpu, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
2974void iemFpuStackUnderflowWithMemOpThenPop(PVMCPUCC pVCpu, uint8_t iStReg, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2975void iemFpuStackUnderflowThenPopPop(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
2976void iemFpuStackPushUnderflow(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
2977void iemFpuStackPushUnderflowTwo(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
2978void iemFpuStackPushOverflow(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
2979void iemFpuStackPushOverflowWithMemOp(PVMCPUCC pVCpu, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
2980/** @} */
2981
2982/** @name SSE+AVX SIMD access and helpers.
2983 * @{ */
2984void iemSseUpdateMxcsr(PVMCPUCC pVCpu, uint32_t fMxcsr) RT_NOEXCEPT;
2985/** @} */
2986
2987/** @name Memory access.
2988 * @{ */
2989
2990/** Report a \#GP instead of \#AC and do not restrict to ring-3 */
2991#define IEM_MEMMAP_F_ALIGN_GP RT_BIT_32(16)
2992/** SSE access that should report a \#GP instead of \#AC, unless MXCSR.MM=1
2993 * when it works like normal \#AC. Always used with IEM_MEMMAP_F_ALIGN_GP. */
2994#define IEM_MEMMAP_F_ALIGN_SSE RT_BIT_32(17)
2995/** If \#AC is applicable, raise it. Always used with IEM_MEMMAP_F_ALIGN_GP.
2996 * Users include FXSAVE & FXRSTOR. */
2997#define IEM_MEMMAP_F_ALIGN_GP_OR_AC RT_BIT_32(18)
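/* Usage sketch (assumption from the bit layout, not a verbatim caller): these flags
 * occupy bits 16 and up so they can be ORed into the uAlignCtl argument of iemMemMap
 * together with the alignment mask in the low bits, e.g. something along the lines of
 *
 *      iemMemMap(pVCpu, &pvMem, &bUnmapInfo, 16, iSegReg, GCPtrMem, fAccess,
 *                15 | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE);
 *
 * for a 16-byte aligned SSE access; see the actual callers for the authoritative
 * pattern. */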
2998
2999VBOXSTRICTRC iemMemMap(PVMCPUCC pVCpu, void **ppvMem, uint8_t *pbUnmapInfo, size_t cbMem, uint8_t iSegReg, RTGCPTR GCPtrMem,
3000 uint32_t fAccess, uint32_t uAlignCtl) RT_NOEXCEPT;
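/* Illustrative sketch only (not part of this header): mapping a 16 byte SSE-style
 * operand so that a misaligned access raises \#GP rather than \#AC.  It is assumed
 * here that the low bits of uAlignCtl carry the alignment mask and that an access
 * flag such as IEM_ACCESS_DATA_R (defined elsewhere) goes in as fAccess; pVCpu,
 * iSegReg and GCPtrMem are taken to be in scope. */
#if 0
    void        *pvMem;
    uint8_t      bUnmapInfo;
    VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMem, &bUnmapInfo, 16 /*cbMem*/, iSegReg, GCPtrMem,
                                      IEM_ACCESS_DATA_R /* assumed access flag */,
                                      15 /* 16-byte alignment mask */ | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE);
#endif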
3001#ifndef IN_RING3
3002VBOXSTRICTRC iemMemCommitAndUnmapPostponeTroubleToR3(PVMCPUCC pVCpu, uint8_t bUnmapInfo) RT_NOEXCEPT;
3003#endif
3004VBOXSTRICTRC iemMemApplySegment(PVMCPUCC pVCpu, uint32_t fAccess, uint8_t iSegReg, size_t cbMem, PRTGCPTR pGCPtrMem) RT_NOEXCEPT;
3005VBOXSTRICTRC iemMemMarkSelDescAccessed(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
3006
3007void iemOpcodeFlushLight(PVMCPUCC pVCpu, uint8_t cbInstr);
3008void iemOpcodeFlushHeavy(PVMCPUCC pVCpu, uint8_t cbInstr);
3009#ifdef IEM_WITH_CODE_TLB
3010void iemOpcodeFetchBytesJmp(PVMCPUCC pVCpu, size_t cbDst, void *pvDst) IEM_NOEXCEPT_MAY_LONGJMP;
3011#else
3012VBOXSTRICTRC iemOpcodeFetchMoreBytes(PVMCPUCC pVCpu, size_t cbMin) RT_NOEXCEPT;
3013#endif
3014uint8_t iemOpcodeGetNextU8SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
3015uint16_t iemOpcodeGetNextU16SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
3016uint32_t iemOpcodeGetNextU32SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
3017uint64_t iemOpcodeGetNextU64SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
3018
3019VBOXSTRICTRC iemMemFetchDataU8(PVMCPUCC pVCpu, uint8_t *pu8Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3020VBOXSTRICTRC iemMemFetchDataU16(PVMCPUCC pVCpu, uint16_t *pu16Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3021VBOXSTRICTRC iemMemFetchDataU32(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3022VBOXSTRICTRC iemMemFetchDataU32NoAc(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3023VBOXSTRICTRC iemMemFetchDataU32_ZX_U64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3024VBOXSTRICTRC iemMemFetchDataU64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3025VBOXSTRICTRC iemMemFetchDataU64NoAc(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3026VBOXSTRICTRC iemMemFetchDataU64AlignedU128(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3027VBOXSTRICTRC iemMemFetchDataR80(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3028VBOXSTRICTRC iemMemFetchDataD80(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3029VBOXSTRICTRC iemMemFetchDataU128(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3030VBOXSTRICTRC iemMemFetchDataU128NoAc(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3031VBOXSTRICTRC iemMemFetchDataU128AlignedSse(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3032VBOXSTRICTRC iemMemFetchDataU256(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3033VBOXSTRICTRC iemMemFetchDataU256NoAc(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3034VBOXSTRICTRC iemMemFetchDataU256AlignedAvx(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3035VBOXSTRICTRC iemMemFetchDataXdtr(PVMCPUCC pVCpu, uint16_t *pcbLimit, PRTGCPTR pGCPtrBase, uint8_t iSegReg,
3036 RTGCPTR GCPtrMem, IEMMODE enmOpSize) RT_NOEXCEPT;
3037uint8_t iemMemFetchDataU8SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3038uint16_t iemMemFetchDataU16SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3039uint32_t iemMemFetchDataU32SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3040uint32_t iemMemFetchDataU32NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3041uint32_t iemMemFlatFetchDataU32SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3042uint64_t iemMemFetchDataU64SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3043uint64_t iemMemFetchDataU64NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3044uint64_t iemMemFetchDataU64AlignedU128SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3045void iemMemFetchDataR80SafeJmp(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3046void iemMemFetchDataD80SafeJmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3047void iemMemFetchDataU128SafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3048void iemMemFetchDataU128NoAcSafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3049void iemMemFetchDataU128AlignedSseSafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3050void iemMemFetchDataU256SafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3051void iemMemFetchDataU256NoAcSafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3052void iemMemFetchDataU256AlignedAvxSafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3053#if 0 /* these are inlined now */
3054uint8_t iemMemFetchDataU8Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3055uint16_t iemMemFetchDataU16Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3056uint32_t iemMemFetchDataU32Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3057uint32_t iemMemFlatFetchDataU32Jmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3058uint64_t iemMemFetchDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3059uint64_t iemMemFetchDataU64AlignedU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3060void iemMemFetchDataR80Jmp(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3061void iemMemFetchDataD80Jmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3062void iemMemFetchDataU128Jmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3063void iemMemFetchDataU128NoAcJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3064void iemMemFetchDataU128AlignedSseJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3065void iemMemFetchDataU256NoAcJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3066void iemMemFetchDataU256AlignedAvxJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3067#endif
3068void iemMemFetchDataU256Jmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3069
3070VBOXSTRICTRC iemMemFetchSysU8(PVMCPUCC pVCpu, uint8_t *pu8Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3071VBOXSTRICTRC iemMemFetchSysU16(PVMCPUCC pVCpu, uint16_t *pu16Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3072VBOXSTRICTRC iemMemFetchSysU32(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3073VBOXSTRICTRC iemMemFetchSysU64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3074VBOXSTRICTRC iemMemFetchSelDesc(PVMCPUCC pVCpu, PIEMSELDESC pDesc, uint16_t uSel, uint8_t uXcpt) RT_NOEXCEPT;
3075VBOXSTRICTRC iemMemFetchSelDescWithErr(PVMCPUCC pVCpu, PIEMSELDESC pDesc, uint16_t uSel,
3076 uint8_t uXcpt, uint16_t uErrorCode) RT_NOEXCEPT;
3077
3078VBOXSTRICTRC iemMemStoreDataU8(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) RT_NOEXCEPT;
3079VBOXSTRICTRC iemMemStoreDataU16(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint16_t u16Value) RT_NOEXCEPT;
3080VBOXSTRICTRC iemMemStoreDataU32(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t u32Value) RT_NOEXCEPT;
3081VBOXSTRICTRC iemMemStoreDataU64(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint64_t u64Value) RT_NOEXCEPT;
3082VBOXSTRICTRC iemMemStoreDataU128(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) RT_NOEXCEPT;
3083VBOXSTRICTRC iemMemStoreDataU128NoAc(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) RT_NOEXCEPT;
3084VBOXSTRICTRC iemMemStoreDataU128AlignedSse(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) RT_NOEXCEPT;
3085VBOXSTRICTRC iemMemStoreDataU256(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) RT_NOEXCEPT;
3086VBOXSTRICTRC iemMemStoreDataU256NoAc(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) RT_NOEXCEPT;
3087VBOXSTRICTRC iemMemStoreDataU256AlignedAvx(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) RT_NOEXCEPT;
3088VBOXSTRICTRC iemMemStoreDataXdtr(PVMCPUCC pVCpu, uint16_t cbLimit, RTGCPTR GCPtrBase, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
3089void iemMemStoreDataU8SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) IEM_NOEXCEPT_MAY_LONGJMP;
3090void iemMemStoreDataU16SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint16_t u16Value) IEM_NOEXCEPT_MAY_LONGJMP;
3091void iemMemStoreDataU32SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t u32Value) IEM_NOEXCEPT_MAY_LONGJMP;
3092void iemMemStoreDataU64SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint64_t u64Value) IEM_NOEXCEPT_MAY_LONGJMP;
3093void iemMemStoreDataU128SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3094void iemMemStoreDataU128NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT128U pu128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3095void iemMemStoreDataU128AlignedSseSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT128U pu128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3096void iemMemStoreDataU256SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3097void iemMemStoreDataU256NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3098void iemMemStoreDataU256AlignedAvxSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3099void iemMemStoreDataR80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTFLOAT80U pr80Value) IEM_NOEXCEPT_MAY_LONGJMP;
3100void iemMemStoreDataD80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTPBCD80U pd80Value) IEM_NOEXCEPT_MAY_LONGJMP;
3101#if 0 /* inlined */
3102void iemMemStoreDataU8Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) IEM_NOEXCEPT_MAY_LONGJMP;
3103void iemMemStoreDataU16Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint16_t u16Value) IEM_NOEXCEPT_MAY_LONGJMP;
3104void iemMemStoreDataU32Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t u32Value) IEM_NOEXCEPT_MAY_LONGJMP;
3105void iemMemStoreDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint64_t u64Value) IEM_NOEXCEPT_MAY_LONGJMP;
3106void iemMemStoreDataU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3107void iemMemStoreDataNoAcU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3108void iemMemStoreDataU256NoAcJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3109void iemMemStoreDataU256AlignedAvxJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3110#endif
3111void iemMemStoreDataU128AlignedSseJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
3112void iemMemStoreDataU256Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
3113
3114uint8_t *iemMemMapDataU8RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3115uint8_t *iemMemMapDataU8AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3116uint8_t *iemMemMapDataU8WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3117uint8_t const *iemMemMapDataU8RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3118uint16_t *iemMemMapDataU16RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3119uint16_t *iemMemMapDataU16AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3120uint16_t *iemMemMapDataU16WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3121uint16_t const *iemMemMapDataU16RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3122uint32_t *iemMemMapDataU32RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3123uint32_t *iemMemMapDataU32AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3124uint32_t *iemMemMapDataU32WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3125uint32_t const *iemMemMapDataU32RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3126uint64_t *iemMemMapDataU64RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3127uint64_t *iemMemMapDataU64AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3128uint64_t *iemMemMapDataU64WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3129uint64_t const *iemMemMapDataU64RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3130PRTFLOAT80U iemMemMapDataR80RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3131PRTFLOAT80U iemMemMapDataR80WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3132PCRTFLOAT80U iemMemMapDataR80RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3133PRTPBCD80U iemMemMapDataD80RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3134PRTPBCD80U iemMemMapDataD80WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3135PCRTPBCD80U iemMemMapDataD80RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3136PRTUINT128U iemMemMapDataU128RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3137PRTUINT128U iemMemMapDataU128AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3138PRTUINT128U iemMemMapDataU128WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3139PCRTUINT128U iemMemMapDataU128RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3140
3141VBOXSTRICTRC iemMemStackPushBeginSpecial(PVMCPUCC pVCpu, size_t cbMem, uint32_t cbAlign,
3142 void **ppvMem, uint8_t *pbUnmapInfo, uint64_t *puNewRsp) RT_NOEXCEPT;
3143VBOXSTRICTRC iemMemStackPushCommitSpecial(PVMCPUCC pVCpu, uint8_t bUnmapInfo, uint64_t uNewRsp) RT_NOEXCEPT;
3144VBOXSTRICTRC iemMemStackPushU16(PVMCPUCC pVCpu, uint16_t u16Value) RT_NOEXCEPT;
3145VBOXSTRICTRC iemMemStackPushU32(PVMCPUCC pVCpu, uint32_t u32Value) RT_NOEXCEPT;
3146VBOXSTRICTRC iemMemStackPushU64(PVMCPUCC pVCpu, uint64_t u64Value) RT_NOEXCEPT;
3147VBOXSTRICTRC iemMemStackPushU16Ex(PVMCPUCC pVCpu, uint16_t u16Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
3148VBOXSTRICTRC iemMemStackPushU32Ex(PVMCPUCC pVCpu, uint32_t u32Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
3149VBOXSTRICTRC iemMemStackPushU64Ex(PVMCPUCC pVCpu, uint64_t u64Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
3150VBOXSTRICTRC iemMemStackPushU32SReg(PVMCPUCC pVCpu, uint32_t u32Value) RT_NOEXCEPT;
3151VBOXSTRICTRC iemMemStackPopBeginSpecial(PVMCPUCC pVCpu, size_t cbMem, uint32_t cbAlign,
3152 void const **ppvMem, uint8_t *pbUnmapInfo, uint64_t *puNewRsp) RT_NOEXCEPT;
3153VBOXSTRICTRC iemMemStackPopContinueSpecial(PVMCPUCC pVCpu, size_t off, size_t cbMem,
3154 void const **ppvMem, uint8_t *pbUnmapInfo, uint64_t uCurNewRsp) RT_NOEXCEPT;
3155VBOXSTRICTRC iemMemStackPopDoneSpecial(PVMCPUCC pVCpu, uint8_t bUnmapInfo) RT_NOEXCEPT;
3156VBOXSTRICTRC iemMemStackPopU16(PVMCPUCC pVCpu, uint16_t *pu16Value) RT_NOEXCEPT;
3157VBOXSTRICTRC iemMemStackPopU32(PVMCPUCC pVCpu, uint32_t *pu32Value) RT_NOEXCEPT;
3158VBOXSTRICTRC iemMemStackPopU64(PVMCPUCC pVCpu, uint64_t *pu64Value) RT_NOEXCEPT;
3159VBOXSTRICTRC iemMemStackPopU16Ex(PVMCPUCC pVCpu, uint16_t *pu16Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
3160VBOXSTRICTRC iemMemStackPopU32Ex(PVMCPUCC pVCpu, uint32_t *pu32Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
3161VBOXSTRICTRC iemMemStackPopU64Ex(PVMCPUCC pVCpu, uint64_t *pu64Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
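/* Illustrative sketch only: the *Special stack helpers above are used in
 * begin/commit pairs.  Pushing an 8 byte value might, under these assumptions
 * about alignment and with error handling trimmed down, look roughly like this
 * (pVCpu and u64Value are taken to be in scope): */
#if 0
    void        *pvMem;
    uint8_t      bUnmapInfo;
    uint64_t     uNewRsp;
    VBOXSTRICTRC rcStrict = iemMemStackPushBeginSpecial(pVCpu, 8 /*cbMem*/, 8 /*cbAlign*/,
                                                        &pvMem, &bUnmapInfo, &uNewRsp);
    if (rcStrict == VINF_SUCCESS)
    {
        *(uint64_t *)pvMem = u64Value;  /* fill in the mapped stack bytes */
        rcStrict = iemMemStackPushCommitSpecial(pVCpu, bUnmapInfo, uNewRsp);
    }
#endif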
3162
3163void iemMemStackPushU16SafeJmp(PVMCPUCC pVCpu, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3164void iemMemStackPushU32SafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3165void iemMemStackPushU32SRegSafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3166void iemMemStackPushU64SafeJmp(PVMCPUCC pVCpu, uint64_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3167void iemMemStackPopGRegU16SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3168void iemMemStackPopGRegU32SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3169void iemMemStackPopGRegU64SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3170
3171void iemMemFlat32StackPushU16SafeJmp(PVMCPUCC pVCpu, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3172void iemMemFlat32StackPushU32SafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3173void iemMemFlat32StackPushU32SRegSafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3174void iemMemFlat32StackPopGRegU16SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3175void iemMemFlat32StackPopGRegU32SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3176
3177void iemMemFlat64StackPushU16SafeJmp(PVMCPUCC pVCpu, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3178void iemMemFlat64StackPushU64SafeJmp(PVMCPUCC pVCpu, uint64_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3179void iemMemFlat64StackPopGRegU16SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3180void iemMemFlat64StackPopGRegU64SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
3181
3182void iemMemStoreStackU16SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3183void iemMemStoreStackU32SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3184void iemMemStoreStackU32SRegSafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3185void iemMemStoreStackU64SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
3186
3187uint16_t iemMemFetchStackU16SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3188uint32_t iemMemFetchStackU32SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3189uint64_t iemMemFetchStackU64SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
3190
3191/** @} */
3192
3193/** @name IEMAllCImpl.cpp
3194 * @note sed -e '/IEM_CIMPL_DEF_/!d' -e 's/IEM_CIMPL_DEF_/IEM_CIMPL_PROTO_/' -e 's/$/;/'
3195 * @{ */
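/* For illustration (the shape of the source definitions is assumed here): a line like
 *     IEM_CIMPL_DEF_1(iemCImpl_leave, IEMMODE, enmEffOpSize)
 * in IEMAllCImpl.cpp is turned by the sed expression above into the prototype
 *     IEM_CIMPL_PROTO_1(iemCImpl_leave, IEMMODE, enmEffOpSize);
 * as listed further down. */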
3196
3197/**
3198 * INT instruction types - iemCImpl_int().
3199 * @note x86 specific
3200 */
3201typedef enum IEMINT
3202{
3203    /** INT n instruction (opcode 0xcd imm8). */
3204 IEMINT_INTN = 0,
3205 /** Single byte INT3 instruction (opcode 0xcc). */
3206 IEMINT_INT3 = IEM_XCPT_FLAGS_BP_INSTR,
3207 /** Single byte INTO instruction (opcode 0xce). */
3208 IEMINT_INTO = IEM_XCPT_FLAGS_OF_INSTR,
3209 /** Single byte INT1 (ICEBP) instruction (opcode 0xf1). */
3210 IEMINT_INT1 = IEM_XCPT_FLAGS_ICEBP_INSTR
3211} IEMINT;
3212AssertCompileSize(IEMINT, 4);
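/* Illustrative sketch only: the decoder is assumed to hand the vector and one of
 * the IEMINT values above to iemCImpl_int() (declared below), and IEM_CIMPL_PROTO_2
 * is assumed to add the usual pVCpu/cbInstr parameters. */
#if 0
    /* 0xcc - INT3: vector 3 (X86_XCPT_BP). */
    rcStrict = iemCImpl_int(pVCpu, cbInstr, 3 /*u8Int*/, IEMINT_INT3);
    /* 0xcd imm8 - INT n: vector taken from the immediate byte. */
    rcStrict = iemCImpl_int(pVCpu, cbInstr, bImm /*u8Int*/, IEMINT_INTN);
#endif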
3213
3214IEM_CIMPL_PROTO_2(iemCImpl_pop_mem16, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3215IEM_CIMPL_PROTO_2(iemCImpl_pop_mem32, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3216IEM_CIMPL_PROTO_2(iemCImpl_pop_mem64, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3217IEM_CIMPL_PROTO_0(iemCImpl_popa_16);
3218IEM_CIMPL_PROTO_0(iemCImpl_popa_32);
3219IEM_CIMPL_PROTO_0(iemCImpl_pusha_16);
3220IEM_CIMPL_PROTO_0(iemCImpl_pusha_32);
3221IEM_CIMPL_PROTO_1(iemCImpl_pushf, IEMMODE, enmEffOpSize);
3222IEM_CIMPL_PROTO_1(iemCImpl_popf, IEMMODE, enmEffOpSize);
3223IEM_CIMPL_PROTO_3(iemCImpl_FarJmp, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
3224IEM_CIMPL_PROTO_3(iemCImpl_callf, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
3225typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
3226typedef FNIEMCIMPLFARBRANCH *PFNIEMCIMPLFARBRANCH;
3227IEM_CIMPL_PROTO_2(iemCImpl_retf, IEMMODE, enmEffOpSize, uint16_t, cbPop);
3228IEM_CIMPL_PROTO_3(iemCImpl_enter, IEMMODE, enmEffOpSize, uint16_t, cbFrame, uint8_t, cParameters);
3229IEM_CIMPL_PROTO_1(iemCImpl_leave, IEMMODE, enmEffOpSize);
3230IEM_CIMPL_PROTO_2(iemCImpl_int, uint8_t, u8Int, IEMINT, enmInt);
3231IEM_CIMPL_PROTO_1(iemCImpl_iret_real_v8086, IEMMODE, enmEffOpSize);
3232IEM_CIMPL_PROTO_4(iemCImpl_iret_prot_v8086, uint32_t, uNewEip, uint16_t, uNewCs, uint32_t, uNewFlags, uint64_t, uNewRsp);
3233IEM_CIMPL_PROTO_1(iemCImpl_iret_prot_NestedTask, IEMMODE, enmEffOpSize);
3234IEM_CIMPL_PROTO_1(iemCImpl_iret_prot, IEMMODE, enmEffOpSize);
3235IEM_CIMPL_PROTO_1(iemCImpl_iret_64bit, IEMMODE, enmEffOpSize);
3236IEM_CIMPL_PROTO_1(iemCImpl_iret, IEMMODE, enmEffOpSize);
3237IEM_CIMPL_PROTO_0(iemCImpl_loadall286);
3238IEM_CIMPL_PROTO_0(iemCImpl_syscall);
3239IEM_CIMPL_PROTO_1(iemCImpl_sysret, IEMMODE, enmEffOpSize);
3240IEM_CIMPL_PROTO_0(iemCImpl_sysenter);
3241IEM_CIMPL_PROTO_1(iemCImpl_sysexit, IEMMODE, enmEffOpSize);
3242IEM_CIMPL_PROTO_2(iemCImpl_LoadSReg, uint8_t, iSegReg, uint16_t, uSel);
3243IEM_CIMPL_PROTO_2(iemCImpl_load_SReg, uint8_t, iSegReg, uint16_t, uSel);
3244IEM_CIMPL_PROTO_2(iemCImpl_pop_Sreg, uint8_t, iSegReg, IEMMODE, enmEffOpSize);
3245IEM_CIMPL_PROTO_5(iemCImpl_load_SReg_Greg, uint16_t, uSel, uint64_t, offSeg, uint8_t, iSegReg, uint8_t, iGReg, IEMMODE, enmEffOpSize);
3246IEM_CIMPL_PROTO_2(iemCImpl_VerX, uint16_t, uSel, bool, fWrite);
3247IEM_CIMPL_PROTO_3(iemCImpl_LarLsl_u64, uint64_t *, pu64Dst, uint16_t, uSel, bool, fIsLar);
3248IEM_CIMPL_PROTO_3(iemCImpl_LarLsl_u16, uint16_t *, pu16Dst, uint16_t, uSel, bool, fIsLar);
3249IEM_CIMPL_PROTO_3(iemCImpl_lgdt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc, IEMMODE, enmEffOpSize);
3250IEM_CIMPL_PROTO_2(iemCImpl_sgdt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3251IEM_CIMPL_PROTO_3(iemCImpl_lidt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc, IEMMODE, enmEffOpSize);
3252IEM_CIMPL_PROTO_2(iemCImpl_sidt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3253IEM_CIMPL_PROTO_1(iemCImpl_lldt, uint16_t, uNewLdt);
3254IEM_CIMPL_PROTO_2(iemCImpl_sldt_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
3255IEM_CIMPL_PROTO_2(iemCImpl_sldt_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3256IEM_CIMPL_PROTO_1(iemCImpl_ltr, uint16_t, uNewTr);
3257IEM_CIMPL_PROTO_2(iemCImpl_str_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
3258IEM_CIMPL_PROTO_2(iemCImpl_str_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3259IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Cd, uint8_t, iGReg, uint8_t, iCrReg);
3260IEM_CIMPL_PROTO_2(iemCImpl_smsw_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
3261IEM_CIMPL_PROTO_2(iemCImpl_smsw_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3262IEM_CIMPL_PROTO_4(iemCImpl_load_CrX, uint8_t, iCrReg, uint64_t, uNewCrX, IEMACCESSCRX, enmAccessCrX, uint8_t, iGReg);
3263IEM_CIMPL_PROTO_2(iemCImpl_mov_Cd_Rd, uint8_t, iCrReg, uint8_t, iGReg);
3264IEM_CIMPL_PROTO_2(iemCImpl_lmsw, uint16_t, u16NewMsw, RTGCPTR, GCPtrEffDst);
3265IEM_CIMPL_PROTO_0(iemCImpl_clts);
3266IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Dd, uint8_t, iGReg, uint8_t, iDrReg);
3267IEM_CIMPL_PROTO_2(iemCImpl_mov_Dd_Rd, uint8_t, iDrReg, uint8_t, iGReg);
3268IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Td, uint8_t, iGReg, uint8_t, iTrReg);
3269IEM_CIMPL_PROTO_2(iemCImpl_mov_Td_Rd, uint8_t, iTrReg, uint8_t, iGReg);
3270IEM_CIMPL_PROTO_1(iemCImpl_invlpg, RTGCPTR, GCPtrPage);
3271IEM_CIMPL_PROTO_3(iemCImpl_invpcid, uint8_t, iEffSeg, RTGCPTR, GCPtrInvpcidDesc, uint64_t, uInvpcidType);
3272IEM_CIMPL_PROTO_0(iemCImpl_invd);
3273IEM_CIMPL_PROTO_0(iemCImpl_wbinvd);
3274IEM_CIMPL_PROTO_0(iemCImpl_rsm);
3275IEM_CIMPL_PROTO_0(iemCImpl_rdtsc);
3276IEM_CIMPL_PROTO_0(iemCImpl_rdtscp);
3277IEM_CIMPL_PROTO_0(iemCImpl_rdpmc);
3278IEM_CIMPL_PROTO_0(iemCImpl_rdmsr);
3279IEM_CIMPL_PROTO_0(iemCImpl_wrmsr);
3280IEM_CIMPL_PROTO_3(iemCImpl_in, uint16_t, u16Port, uint8_t, cbReg, uint8_t, bImmAndEffAddrMode);
3281IEM_CIMPL_PROTO_2(iemCImpl_in_eAX_DX, uint8_t, cbReg, IEMMODE, enmEffAddrMode);
3282IEM_CIMPL_PROTO_3(iemCImpl_out, uint16_t, u16Port, uint8_t, cbReg, uint8_t, bImmAndEffAddrMode);
3283IEM_CIMPL_PROTO_2(iemCImpl_out_DX_eAX, uint8_t, cbReg, IEMMODE, enmEffAddrMode);
3284IEM_CIMPL_PROTO_0(iemCImpl_cli);
3285IEM_CIMPL_PROTO_0(iemCImpl_sti);
3286IEM_CIMPL_PROTO_0(iemCImpl_hlt);
3287IEM_CIMPL_PROTO_1(iemCImpl_monitor, uint8_t, iEffSeg);
3288IEM_CIMPL_PROTO_0(iemCImpl_mwait);
3289IEM_CIMPL_PROTO_0(iemCImpl_swapgs);
3290IEM_CIMPL_PROTO_0(iemCImpl_cpuid);
3291IEM_CIMPL_PROTO_1(iemCImpl_aad, uint8_t, bImm);
3292IEM_CIMPL_PROTO_1(iemCImpl_aam, uint8_t, bImm);
3293IEM_CIMPL_PROTO_0(iemCImpl_daa);
3294IEM_CIMPL_PROTO_0(iemCImpl_das);
3295IEM_CIMPL_PROTO_0(iemCImpl_aaa);
3296IEM_CIMPL_PROTO_0(iemCImpl_aas);
3297IEM_CIMPL_PROTO_3(iemCImpl_bound_16, int16_t, idxArray, int16_t, idxLowerBound, int16_t, idxUpperBound);
3298IEM_CIMPL_PROTO_3(iemCImpl_bound_32, int32_t, idxArray, int32_t, idxLowerBound, int32_t, idxUpperBound);
3299IEM_CIMPL_PROTO_0(iemCImpl_xgetbv);
3300IEM_CIMPL_PROTO_0(iemCImpl_xsetbv);
3301IEM_CIMPL_PROTO_5(iemCImpl_cmpxchg16b_fallback_rendezvous, PRTUINT128U, pu128Dst, PRTUINT128U, pu128RaxRdx,
3302 PRTUINT128U, pu128RbxRcx, uint32_t *, pEFlags, uint8_t, bUnmapInfo);
3303IEM_CIMPL_PROTO_2(iemCImpl_clflush_clflushopt, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
3304IEM_CIMPL_PROTO_1(iemCImpl_finit, bool, fCheckXcpts);
3305IEM_CIMPL_PROTO_3(iemCImpl_fxsave, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
3306IEM_CIMPL_PROTO_3(iemCImpl_fxrstor, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
3307IEM_CIMPL_PROTO_3(iemCImpl_xsave, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
3308IEM_CIMPL_PROTO_3(iemCImpl_xrstor, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
3309IEM_CIMPL_PROTO_2(iemCImpl_stmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
3310IEM_CIMPL_PROTO_2(iemCImpl_vstmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
3311IEM_CIMPL_PROTO_2(iemCImpl_ldmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
3312IEM_CIMPL_PROTO_2(iemCImpl_vldmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
3313IEM_CIMPL_PROTO_3(iemCImpl_fnstenv, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3314IEM_CIMPL_PROTO_3(iemCImpl_fnsave, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
3315IEM_CIMPL_PROTO_3(iemCImpl_fldenv, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3316IEM_CIMPL_PROTO_3(iemCImpl_frstor, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3317IEM_CIMPL_PROTO_1(iemCImpl_fldcw, uint16_t, u16Fcw);
3318IEM_CIMPL_PROTO_2(iemCImpl_fxch_underflow, uint8_t, iStReg, uint16_t, uFpuOpcode);
3319IEM_CIMPL_PROTO_3(iemCImpl_fcomi_fucomi, uint8_t, iStReg, bool, fUCmp, uint32_t, uPopAndFpuOpcode);
3320IEM_CIMPL_PROTO_2(iemCImpl_rdseed, uint8_t, iReg, IEMMODE, enmEffOpSize);
3321IEM_CIMPL_PROTO_2(iemCImpl_rdrand, uint8_t, iReg, IEMMODE, enmEffOpSize);
3322IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3323IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3324IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
3325IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
3326IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3327IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3328IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
3329IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
3330IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3331IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3332IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
3333IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
3334IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3335IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
3336IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
3337IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
3338IEM_CIMPL_PROTO_2(iemCImpl_vpgather_worker_xx, uint32_t, u32PackedArgs, uint32_t, u32Disp);
3339
3340/** Packed 32-bit argument for iemCImpl_vpgather_worker_xx. */
3341typedef union IEMGATHERARGS
3342{
3343 /** Integer view. */
3344 uint32_t u;
3345 /** Bitfield view. */
3346 struct
3347 {
3348 uint32_t iYRegDst : 4; /**< 0 - XMM or YMM register number (destination) */
3349 uint32_t iYRegIdc : 4; /**< 4 - XMM or YMM register number (indices) */
3350 uint32_t iYRegMsk : 4; /**< 8 - XMM or YMM register number (mask) */
3351 uint32_t iGRegBase : 4; /**< 12 - general register number (base ptr) */
3352 uint32_t iScale : 2; /**< 16 - scale factor (1/2/4/8) */
3353 uint32_t enmEffOpSize : 2; /**< 18 - operand size (16/32/64/--) */
3354 uint32_t enmEffAddrMode : 2; /**< 20 - addressing mode (16/32/64/--) */
3355 uint32_t iEffSeg : 3; /**< 22 - effective segment (ES/CS/SS/DS/FS/GS) */
3356 uint32_t fVex256 : 1; /**< 25 - overall instruction width (128/256 bits) */
3357 uint32_t fIdxQword : 1; /**< 26 - individual index width (4/8 bytes) */
3358 uint32_t fValQword : 1; /**< 27 - individual value width (4/8 bytes) */
3359 } s;
3360} IEMGATHERARGS;
3361AssertCompileSize(IEMGATHERARGS, sizeof(uint32_t));
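/* Illustrative sketch only: packing the gather arguments through the bitfield
 * view and handing the integer view to iemCImpl_vpgather_worker_xx as
 * u32PackedArgs (the field values below are placeholders, not real decode state): */
#if 0
    IEMGATHERARGS PackedArgs = {0};
    PackedArgs.s.iYRegDst       = iYRegDst;        /* destination XMM/YMM register */
    PackedArgs.s.iYRegIdc       = iYRegIdc;        /* index XMM/YMM register */
    PackedArgs.s.iYRegMsk       = iYRegMsk;        /* mask XMM/YMM register */
    PackedArgs.s.iGRegBase      = iGRegBase;       /* base general register */
    PackedArgs.s.iScale         = iScale;
    PackedArgs.s.enmEffOpSize   = enmEffOpSize;
    PackedArgs.s.enmEffAddrMode = enmEffAddrMode;
    PackedArgs.s.iEffSeg        = iEffSeg;
    PackedArgs.s.fVex256        = fVex256;
    PackedArgs.s.fIdxQword      = fIdxQword;
    PackedArgs.s.fValQword      = fValQword;
    /* ... PackedArgs.u then goes in as u32PackedArgs, together with the displacement as u32Disp. */
#endif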
3362
3363/** @} */
3364
3365/** @name IEMAllCImplStrInstr.cpp.h
3366 * @note sed -e '/IEM_CIMPL_DEF_/!d' -e 's/IEM_CIMPL_DEF_/IEM_CIMPL_PROTO_/' -e 's/$/;/' -e 's/RT_CONCAT4(//' \
3367 * -e 's/,ADDR_SIZE)/64/g' -e 's/,OP_SIZE,/64/g' -e 's/,OP_rAX,/rax/g' IEMAllCImplStrInstr.cpp.h
3368 * @{ */
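/* For illustration: the template file is assumed to define each body once with an
 * RT_CONCAT4 name template, e.g. roughly
 *     IEM_CIMPL_DEF_1(RT_CONCAT4(iemCImpl_rep_movs_op,OP_SIZE,_addr,ADDR_SIZE), uint8_t, iEffSeg)
 * which, instantiated per OP_SIZE/ADDR_SIZE combination, yields the
 * iemCImpl_rep_movs_op8_addr16 ... iemCImpl_rep_movs_op64_addr64 prototypes below. */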
3369IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr16, uint8_t, iEffSeg);
3370IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr16, uint8_t, iEffSeg);
3371IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m16);
3372IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m16);
3373IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr16, uint8_t, iEffSeg);
3374IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m16);
3375IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m16, int8_t, iEffSeg);
3376IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr16, bool, fIoChecked);
3377IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr16, bool, fIoChecked);
3378IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3379IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3380
3381IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr16, uint8_t, iEffSeg);
3382IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr16, uint8_t, iEffSeg);
3383IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m16);
3384IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m16);
3385IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr16, uint8_t, iEffSeg);
3386IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m16);
3387IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m16, int8_t, iEffSeg);
3388IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr16, bool, fIoChecked);
3389IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr16, bool, fIoChecked);
3390IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3391IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3392
3393IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr16, uint8_t, iEffSeg);
3394IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr16, uint8_t, iEffSeg);
3395IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m16);
3396IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m16);
3397IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr16, uint8_t, iEffSeg);
3398IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m16);
3399IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m16, int8_t, iEffSeg);
3400IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr16, bool, fIoChecked);
3401IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr16, bool, fIoChecked);
3402IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3403IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr16, uint8_t, iEffSeg, bool, fIoChecked);
3404
3405
3406IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr32, uint8_t, iEffSeg);
3407IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr32, uint8_t, iEffSeg);
3408IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m32);
3409IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m32);
3410IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr32, uint8_t, iEffSeg);
3411IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m32);
3412IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m32, int8_t, iEffSeg);
3413IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr32, bool, fIoChecked);
3414IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr32, bool, fIoChecked);
3415IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3416IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3417
3418IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr32, uint8_t, iEffSeg);
3419IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr32, uint8_t, iEffSeg);
3420IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m32);
3421IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m32);
3422IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr32, uint8_t, iEffSeg);
3423IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m32);
3424IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m32, int8_t, iEffSeg);
3425IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr32, bool, fIoChecked);
3426IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr32, bool, fIoChecked);
3427IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3428IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3429
3430IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr32, uint8_t, iEffSeg);
3431IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr32, uint8_t, iEffSeg);
3432IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m32);
3433IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m32);
3434IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr32, uint8_t, iEffSeg);
3435IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m32);
3436IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m32, int8_t, iEffSeg);
3437IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr32, bool, fIoChecked);
3438IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr32, bool, fIoChecked);
3439IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3440IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3441
3442IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op64_addr32, uint8_t, iEffSeg);
3443IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op64_addr32, uint8_t, iEffSeg);
3444IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_rax_m32);
3445IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_rax_m32);
3446IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op64_addr32, uint8_t, iEffSeg);
3447IEM_CIMPL_PROTO_0(iemCImpl_stos_rax_m32);
3448IEM_CIMPL_PROTO_1(iemCImpl_lods_rax_m32, int8_t, iEffSeg);
3449IEM_CIMPL_PROTO_1(iemCImpl_ins_op64_addr32, bool, fIoChecked);
3450IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op64_addr32, bool, fIoChecked);
3451IEM_CIMPL_PROTO_2(iemCImpl_outs_op64_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3452IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op64_addr32, uint8_t, iEffSeg, bool, fIoChecked);
3453
3454
3455IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr64, uint8_t, iEffSeg);
3456IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr64, uint8_t, iEffSeg);
3457IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m64);
3458IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m64);
3459IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr64, uint8_t, iEffSeg);
3460IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m64);
3461IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m64, int8_t, iEffSeg);
3462IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr64, bool, fIoChecked);
3463IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr64, bool, fIoChecked);
3464IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3465IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3466
3467IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr64, uint8_t, iEffSeg);
3468IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr64, uint8_t, iEffSeg);
3469IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m64);
3470IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m64);
3471IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr64, uint8_t, iEffSeg);
3472IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m64);
3473IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m64, int8_t, iEffSeg);
3474IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr64, bool, fIoChecked);
3475IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr64, bool, fIoChecked);
3476IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3477IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3478
3479IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr64, uint8_t, iEffSeg);
3480IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr64, uint8_t, iEffSeg);
3481IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m64);
3482IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m64);
3483IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr64, uint8_t, iEffSeg);
3484IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m64);
3485IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m64, int8_t, iEffSeg);
3486IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr64, bool, fIoChecked);
3487IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr64, bool, fIoChecked);
3488IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3489IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3490
3491IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op64_addr64, uint8_t, iEffSeg);
3492IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op64_addr64, uint8_t, iEffSeg);
3493IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_rax_m64);
3494IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_rax_m64);
3495IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op64_addr64, uint8_t, iEffSeg);
3496IEM_CIMPL_PROTO_0(iemCImpl_stos_rax_m64);
3497IEM_CIMPL_PROTO_1(iemCImpl_lods_rax_m64, int8_t, iEffSeg);
3498IEM_CIMPL_PROTO_1(iemCImpl_ins_op64_addr64, bool, fIoChecked);
3499IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op64_addr64, bool, fIoChecked);
3500IEM_CIMPL_PROTO_2(iemCImpl_outs_op64_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3501IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op64_addr64, uint8_t, iEffSeg, bool, fIoChecked);
3502/** @} */
3503
3504#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
3505VBOXSTRICTRC iemVmxVmexit(PVMCPUCC pVCpu, uint32_t uExitReason, uint64_t u64ExitQual) RT_NOEXCEPT;
3506VBOXSTRICTRC iemVmxVmexitInstr(PVMCPUCC pVCpu, uint32_t uExitReason, uint8_t cbInstr) RT_NOEXCEPT;
3507VBOXSTRICTRC iemVmxVmexitInstrNeedsInfo(PVMCPUCC pVCpu, uint32_t uExitReason, VMXINSTRID uInstrId, uint8_t cbInstr) RT_NOEXCEPT;
3508VBOXSTRICTRC iemVmxVmexitTaskSwitch(PVMCPUCC pVCpu, IEMTASKSWITCH enmTaskSwitch, RTSEL SelNewTss, uint8_t cbInstr) RT_NOEXCEPT;
3509VBOXSTRICTRC iemVmxVmexitEvent(PVMCPUCC pVCpu, uint8_t uVector, uint32_t fFlags, uint32_t uErrCode, uint64_t uCr2, uint8_t cbInstr) RT_NOEXCEPT;
3510VBOXSTRICTRC iemVmxVmexitEventDoubleFault(PVMCPUCC pVCpu) RT_NOEXCEPT;
3511VBOXSTRICTRC iemVmxVmexitEpt(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint32_t fAccess, uint32_t fSlatFail, uint8_t cbInstr) RT_NOEXCEPT;
3512VBOXSTRICTRC iemVmxVmexitPreemptTimer(PVMCPUCC pVCpu) RT_NOEXCEPT;
3513VBOXSTRICTRC iemVmxVmexitInstrMwait(PVMCPUCC pVCpu, bool fMonitorHwArmed, uint8_t cbInstr) RT_NOEXCEPT;
3514VBOXSTRICTRC iemVmxVmexitInstrIo(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint16_t u16Port,
3515 bool fImm, uint8_t cbAccess, uint8_t cbInstr) RT_NOEXCEPT;
3516VBOXSTRICTRC iemVmxVmexitInstrStrIo(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint16_t u16Port, uint8_t cbAccess,
3517 bool fRep, VMXEXITINSTRINFO ExitInstrInfo, uint8_t cbInstr) RT_NOEXCEPT;
3518VBOXSTRICTRC iemVmxVmexitInstrMovDrX(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint8_t iDrReg, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3519VBOXSTRICTRC iemVmxVmexitInstrMovToCr8(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3520VBOXSTRICTRC iemVmxVmexitInstrMovFromCr8(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3521VBOXSTRICTRC iemVmxVmexitInstrMovToCr3(PVMCPUCC pVCpu, uint64_t uNewCr3, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3522VBOXSTRICTRC iemVmxVmexitInstrMovFromCr3(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3523VBOXSTRICTRC iemVmxVmexitInstrMovToCr0Cr4(PVMCPUCC pVCpu, uint8_t iCrReg, uint64_t *puNewCrX, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
3524VBOXSTRICTRC iemVmxVmexitInstrClts(PVMCPUCC pVCpu, uint8_t cbInstr) RT_NOEXCEPT;
3525VBOXSTRICTRC iemVmxVmexitInstrLmsw(PVMCPUCC pVCpu, uint32_t uGuestCr0, uint16_t *pu16NewMsw,
3526 RTGCPTR GCPtrEffDst, uint8_t cbInstr) RT_NOEXCEPT;
3527VBOXSTRICTRC iemVmxVmexitInstrInvlpg(PVMCPUCC pVCpu, RTGCPTR GCPtrPage, uint8_t cbInstr) RT_NOEXCEPT;
3528VBOXSTRICTRC iemVmxApicWriteEmulation(PVMCPUCC pVCpu) RT_NOEXCEPT;
3529VBOXSTRICTRC iemVmxVirtApicAccessUnused(PVMCPUCC pVCpu, PRTGCPHYS pGCPhysAccess, size_t cbAccess, uint32_t fAccess) RT_NOEXCEPT;
3530uint32_t iemVmxVirtApicReadRaw32(PVMCPUCC pVCpu, uint16_t offReg) RT_NOEXCEPT;
3531void iemVmxVirtApicWriteRaw32(PVMCPUCC pVCpu, uint16_t offReg, uint32_t uReg) RT_NOEXCEPT;
3532VBOXSTRICTRC iemVmxInvvpid(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPTR GCPtrInvvpidDesc,
3533 uint64_t u64InvvpidType, PCVMXVEXITINFO pExitInfo) RT_NOEXCEPT;
3534bool iemVmxIsRdmsrWrmsrInterceptSet(PCVMCPU pVCpu, uint32_t uExitReason, uint32_t idMsr) RT_NOEXCEPT;
3535IEM_CIMPL_PROTO_0(iemCImpl_vmxoff);
3536IEM_CIMPL_PROTO_2(iemCImpl_vmxon, uint8_t, iEffSeg, RTGCPTR, GCPtrVmxon);
3537IEM_CIMPL_PROTO_0(iemCImpl_vmlaunch);
3538IEM_CIMPL_PROTO_0(iemCImpl_vmresume);
3539IEM_CIMPL_PROTO_2(iemCImpl_vmptrld, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
3540IEM_CIMPL_PROTO_2(iemCImpl_vmptrst, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
3541IEM_CIMPL_PROTO_2(iemCImpl_vmclear, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
3542IEM_CIMPL_PROTO_2(iemCImpl_vmwrite_reg, uint64_t, u64Val, uint64_t, u64VmcsField);
3543IEM_CIMPL_PROTO_3(iemCImpl_vmwrite_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrVal, uint32_t, u64VmcsField);
3544IEM_CIMPL_PROTO_2(iemCImpl_vmread_reg64, uint64_t *, pu64Dst, uint64_t, u64VmcsField);
3545IEM_CIMPL_PROTO_2(iemCImpl_vmread_reg32, uint64_t *, pu32Dst, uint32_t, u32VmcsField);
3546IEM_CIMPL_PROTO_3(iemCImpl_vmread_mem_reg64, uint8_t, iEffSeg, RTGCPTR, GCPtrDst, uint32_t, u64VmcsField);
3547IEM_CIMPL_PROTO_3(iemCImpl_vmread_mem_reg32, uint8_t, iEffSeg, RTGCPTR, GCPtrDst, uint32_t, u32VmcsField);
3548IEM_CIMPL_PROTO_3(iemCImpl_invvpid, uint8_t, iEffSeg, RTGCPTR, GCPtrInvvpidDesc, uint64_t, uInvvpidType);
3549IEM_CIMPL_PROTO_3(iemCImpl_invept, uint8_t, iEffSeg, RTGCPTR, GCPtrInveptDesc, uint64_t, uInveptType);
3550IEM_CIMPL_PROTO_0(iemCImpl_vmx_pause);
3551#endif
3552
3553#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
3554VBOXSTRICTRC iemSvmVmexit(PVMCPUCC pVCpu, uint64_t uExitCode, uint64_t uExitInfo1, uint64_t uExitInfo2) RT_NOEXCEPT;
3555VBOXSTRICTRC iemHandleSvmEventIntercept(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector, uint32_t fFlags, uint32_t uErr, uint64_t uCr2) RT_NOEXCEPT;
3556VBOXSTRICTRC iemSvmHandleIOIntercept(PVMCPUCC pVCpu, uint16_t u16Port, SVMIOIOTYPE enmIoType, uint8_t cbReg,
3557 uint8_t cAddrSizeBits, uint8_t iEffSeg, bool fRep, bool fStrIo, uint8_t cbInstr) RT_NOEXCEPT;
3558VBOXSTRICTRC iemSvmHandleMsrIntercept(PVMCPUCC pVCpu, uint32_t idMsr, bool fWrite, uint8_t cbInstr) RT_NOEXCEPT;
3559IEM_CIMPL_PROTO_0(iemCImpl_vmrun);
3560IEM_CIMPL_PROTO_0(iemCImpl_vmload);
3561IEM_CIMPL_PROTO_0(iemCImpl_vmsave);
3562IEM_CIMPL_PROTO_0(iemCImpl_clgi);
3563IEM_CIMPL_PROTO_0(iemCImpl_stgi);
3564IEM_CIMPL_PROTO_0(iemCImpl_invlpga);
3565IEM_CIMPL_PROTO_0(iemCImpl_skinit);
3566IEM_CIMPL_PROTO_0(iemCImpl_svm_pause);
3567#endif
3568
3569IEM_CIMPL_PROTO_0(iemCImpl_vmcall); /* vmx */
3570IEM_CIMPL_PROTO_0(iemCImpl_vmmcall); /* svm */
3571IEM_CIMPL_PROTO_1(iemCImpl_Hypercall, uint16_t, uDisOpcode); /* both */
3572
3573extern const PFNIEMOP g_apfnIemInterpretOnlyOneByteMap[256];
3574extern const PFNIEMOP g_apfnIemInterpretOnlyTwoByteMap[1024];
3575extern const PFNIEMOP g_apfnIemInterpretOnlyThreeByte0f3a[1024];
3576extern const PFNIEMOP g_apfnIemInterpretOnlyThreeByte0f38[1024];
3577extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap1[1024];
3578extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap2[1024];
3579extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap3[1024];
3580
3581/*
3582 * Recompiler-related stuff: threaded recompiler opcode maps and built-in threaded functions.
3583 */
3584extern const PFNIEMOP g_apfnIemThreadedRecompilerOneByteMap[256];
3585extern const PFNIEMOP g_apfnIemThreadedRecompilerTwoByteMap[1024];
3586extern const PFNIEMOP g_apfnIemThreadedRecompilerThreeByte0f3a[1024];
3587extern const PFNIEMOP g_apfnIemThreadedRecompilerThreeByte0f38[1024];
3588extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap1[1024];
3589extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap2[1024];
3590extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap3[1024];
3591
3592
3593IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_Nop);
3594IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_LogCpuState);
3595
3596IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_DeferToCImpl0);
3597
3598IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckIrq);
3599IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckTimers);
3600IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckTimersAndIrq);
3601IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckMode);
3602IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckHwInstrBps);
3603IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLim);
3604
3605IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodes);
3606IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodes);
3607IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesConsiderCsLim);
3608
3609/* Branching: */
3610IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndPcAndOpcodes);
3611IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckPcAndOpcodes);
3612IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckPcAndOpcodesConsiderCsLim);
3613
3614IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesLoadingTlb);
3615IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesLoadingTlb);
3616IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesLoadingTlbConsiderCsLim);
3617
3618/* Natural page crossing: */
3619IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesAcrossPageLoadingTlb);
3620IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesAcrossPageLoadingTlb);
3621IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesAcrossPageLoadingTlbConsiderCsLim);
3622
3623IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesOnNextPageLoadingTlb);
3624IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNextPageLoadingTlb);
3625IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNextPageLoadingTlbConsiderCsLim);
3626
3627IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesOnNewPageLoadingTlb);
3628IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNewPageLoadingTlb);
3629IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNewPageLoadingTlbConsiderCsLim);
3630
3631IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_Jump);
3632
3633bool iemThreadedCompileEmitIrqCheckBefore(PVMCPUCC pVCpu, PIEMTB pTb);
3634bool iemThreadedCompileBeginEmitCallsComplications(PVMCPUCC pVCpu, PIEMTB pTb);
3635#ifdef IEM_WITH_INTRA_TB_JUMPS
3636DECLHIDDEN(int) iemThreadedCompileBackAtFirstInstruction(PVMCPU pVCpu, PIEMTB pTb) RT_NOEXCEPT;
3637#endif
3638
3639
3640/** @} */
3641
3642RT_C_DECLS_END
3643
3644
3645#endif /* !VMM_INCLUDED_SRC_VMMAll_target_x86_IEMInternal_x86_h */
3646