VirtualBox

source: vbox/trunk/src/VBox/VMM/include/IEMInternal.h@105490

Last change on this file since 105490 was 105490, checked in by vboxsync, 4 months ago

VMM/IEM: Basic infrastructure to natively recompile SIMD floating point instructions, bugref:10652

SIMD floating point operation behavior depends on the guest MXCSR value, which needs to be written to the
host's floating point control register (MXCSR on x86, FPCR on arm64, which requires conversion) and
restored to the host's value when the TB finishes execution, to avoid inconsistencies in case the guest
changes MXCSR. The ARM implementation does not conform to the x86 behavior because default NaN values have
the sign bit clear on arm64 while it is set on x86. There are rounding differences as well, and earlier
ARMv8 revisions don't support the FPCR.FIZ and FPCR.AH features. Should still work out as long as the guest
doesn't try to do funny stuff.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 349.6 KB
/* $Id: IEMInternal.h 105490 2024-07-24 14:49:29Z vboxsync $ */
/** @file
 * IEM - Internal header file.
 */

/*
 * Copyright (C) 2011-2024 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * SPDX-License-Identifier: GPL-3.0-only
 */

#ifndef VMM_INCLUDED_SRC_include_IEMInternal_h
#define VMM_INCLUDED_SRC_include_IEMInternal_h
#ifndef RT_WITHOUT_PRAGMA_ONCE
# pragma once
#endif

#ifndef RT_IN_ASSEMBLER
# include <VBox/vmm/cpum.h>
# include <VBox/vmm/iem.h>
# include <VBox/vmm/pgm.h>
# include <VBox/vmm/stam.h>
# include <VBox/param.h>

# include <iprt/setjmp-without-sigmask.h>
# include <iprt/list.h>
#endif /* !RT_IN_ASSEMBLER */


RT_C_DECLS_BEGIN


/** @defgroup grp_iem_int Internals
 * @ingroup grp_iem
 * @internal
 * @{
 */

/* Make doxygen happy w/o overcomplicating the #if checks. */
#ifdef DOXYGEN_RUNNING
# define IEM_WITH_THROW_CATCH
# define VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP
#endif

/** For expanding symbols in SlickEdit and other products when tagging and
 * cross-referencing IEM symbols. */
#ifndef IEM_STATIC
# define IEM_STATIC static
#endif

/** @def IEM_WITH_SETJMP
 * Enables alternative status code handling using setjmps.
 *
 * This adds a bit of expense via the setjmp() call since it saves all the
 * non-volatile registers. However, it eliminates return code checks and allows
 * for more optimal return value passing (return regs instead of stack buffer).
 */
#if defined(DOXYGEN_RUNNING) || defined(RT_OS_WINDOWS) || 1
# define IEM_WITH_SETJMP
#endif

/** @def IEM_WITH_THROW_CATCH
 * Enables using C++ throw/catch as an alternative to setjmp/longjmp in user
 * mode code when IEM_WITH_SETJMP is in effect.
 *
 * With GCC 11.3.1 and code TLB on linux, using throw/catch instead of
 * setjmp/longjmp resulted in bs2-test-1 running 3.00% faster and all but one
 * test result value improving by more than 1%. (Best out of three.)
 *
 * With Visual C++ 2019 and code TLB on windows, using throw/catch instead of
 * setjmp/longjmp resulted in bs2-test-1 running 3.68% faster and all but some
 * of the MMIO and CPUID tests ran noticeably faster. Variation is greater than
 * on Linux, but it should be quite a bit faster for normal code.
 */
#if defined(__cplusplus) && defined(IEM_WITH_SETJMP) && defined(IN_RING3) && (defined(__GNUC__) || defined(_MSC_VER)) /* ASM-NOINC-START */
# define IEM_WITH_THROW_CATCH
#endif /*ASM-NOINC-END*/

/** @def IEMNATIVE_WITH_DELAYED_PC_UPDATING
 * Enables the delayed PC updating optimization (see @bugref{10373}).
 */
#if defined(DOXYGEN_RUNNING) || 1
# define IEMNATIVE_WITH_DELAYED_PC_UPDATING
#endif

/** Enables the SIMD register allocator @bugref{10614}. */
#if defined(DOXYGEN_RUNNING) || 1
# define IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
#endif
/** Enables access to even callee saved registers. */
//# define IEMNATIVE_WITH_SIMD_REG_ACCESS_ALL_REGISTERS

#if defined(DOXYGEN_RUNNING) || 1
/** @def IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
 * Delay the writeback of dirty registers as long as possible. */
# define IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
#endif

/** @def IEM_WITH_TLB_STATISTICS
 * Enables all TLB statistics. */
#if defined(VBOX_WITH_STATISTICS) || defined(DOXYGEN_RUNNING)
# define IEM_WITH_TLB_STATISTICS
#endif

/** @def IEMNATIVE_WITH_SIMD_FP_NATIVE_EMITTERS
 * Enable this to use native emitters for certain SIMD FP operations. */
#if 1 || defined(DOXYGEN_RUNNING)
# define IEMNATIVE_WITH_SIMD_FP_NATIVE_EMITTERS
#endif

/** @def VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP
 * Enables a quicker alternative to throw/longjmp for IEM_DO_LONGJMP when
 * executing native translation blocks.
 *
 * This exploits the fact that we save all non-volatile registers in the TB
 * prologue and thus just need to do the same as the TB epilogue to get the
 * effect of a longjmp/throw. Since MSC marks XMM6 thru XMM15 as
 * non-volatile (and does something even more crazy for ARM), this probably
 * won't work reliably on Windows. */
#ifdef RT_ARCH_ARM64
# ifndef RT_OS_WINDOWS
#  define VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP
# endif
#endif
/* ASM-NOINC-START */
#ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP
# if !defined(IN_RING3) \
  || !defined(VBOX_WITH_IEM_RECOMPILER) \
  || !defined(VBOX_WITH_IEM_NATIVE_RECOMPILER)
#  undef VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP
# elif defined(RT_OS_WINDOWS)
#  pragma message("VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP is not safe to use on windows")
# endif
#endif


/** @def IEM_DO_LONGJMP
 *
 * Wrapper around longjmp / throw.
 *
 * @param a_pVCpu The CPU handle.
 * @param a_rc    The status code to jump back with / throw.
 */
#if defined(IEM_WITH_SETJMP) || defined(DOXYGEN_RUNNING)
# ifdef IEM_WITH_THROW_CATCH
#  ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP
#   define IEM_DO_LONGJMP(a_pVCpu, a_rc) do { \
            if ((a_pVCpu)->iem.s.pvTbFramePointerR3) \
                iemNativeTbLongJmp((a_pVCpu)->iem.s.pvTbFramePointerR3, (a_rc)); \
            throw int(a_rc); \
        } while (0)
#  else
#   define IEM_DO_LONGJMP(a_pVCpu, a_rc) throw int(a_rc)
#  endif
# else
#  define IEM_DO_LONGJMP(a_pVCpu, a_rc) longjmp(*(a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf), (a_rc))
# endif
#endif
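
/* Illustrative sketch of the catching side (hypothetical helper names, not the
 * real IEM setjmp wrappers): the longjmp flavour above requires the caller to
 * have armed the jump buffer first, roughly like this:
 * @code{.c}
 *  jmp_buf JmpBuf;
 *  int rcStrict = setjmp(JmpBuf);
 *  if (rcStrict == 0)
 *  {
 *      pVCpu->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf;   // arm the jump buffer
 *      rcStrict = iemDoWorkThatMayLongJmp(pVCpu);  // hypothetical worker
 *  }
 *  // else: rcStrict is the status code passed to IEM_DO_LONGJMP.
 * @endcode
 * The throw/catch flavour instead wraps the same work in a try block and
 * catches the thrown int.
 */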

/** For use with IEM functions that may do a longjmp (when enabled).
 *
 * Visual C++ has trouble longjmp'ing from/over functions with the noexcept
 * attribute. So, we indicate that functions that may be part of a longjmp may
 * throw "exceptions" and that the compiler should definitely not generate any
 * std::terminate calling unwind code.
 *
 * Here is one example of this ending in std::terminate:
 * @code{.txt}
00 00000041`cadfda10 00007ffc`5d5a1f9f ucrtbase!abort+0x4e
01 00000041`cadfda40 00007ffc`57af229a ucrtbase!terminate+0x1f
02 00000041`cadfda70 00007ffb`eec91030 VCRUNTIME140!__std_terminate+0xa [d:\agent\_work\1\s\src\vctools\crt\vcruntime\src\eh\ehhelpers.cpp @ 192]
03 00000041`cadfdaa0 00007ffb`eec92c6d VCRUNTIME140_1!_CallSettingFrame+0x20 [d:\agent\_work\1\s\src\vctools\crt\vcruntime\src\eh\amd64\handlers.asm @ 50]
04 00000041`cadfdad0 00007ffb`eec93ae5 VCRUNTIME140_1!__FrameHandler4::FrameUnwindToState+0x241 [d:\agent\_work\1\s\src\vctools\crt\vcruntime\src\eh\frame.cpp @ 1085]
05 00000041`cadfdc00 00007ffb`eec92258 VCRUNTIME140_1!__FrameHandler4::FrameUnwindToEmptyState+0x2d [d:\agent\_work\1\s\src\vctools\crt\vcruntime\src\eh\risctrnsctrl.cpp @ 218]
06 00000041`cadfdc30 00007ffb`eec940e9 VCRUNTIME140_1!__InternalCxxFrameHandler<__FrameHandler4>+0x194 [d:\agent\_work\1\s\src\vctools\crt\vcruntime\src\eh\frame.cpp @ 304]
07 00000041`cadfdcd0 00007ffc`5f9f249f VCRUNTIME140_1!__CxxFrameHandler4+0xa9 [d:\agent\_work\1\s\src\vctools\crt\vcruntime\src\eh\risctrnsctrl.cpp @ 290]
08 00000041`cadfdd40 00007ffc`5f980939 ntdll!RtlpExecuteHandlerForUnwind+0xf
09 00000041`cadfdd70 00007ffc`5f9a0edd ntdll!RtlUnwindEx+0x339
0a 00000041`cadfe490 00007ffc`57aff976 ntdll!RtlUnwind+0xcd
0b 00000041`cadfea00 00007ffb`e1b5de01 VCRUNTIME140!__longjmp_internal+0xe6 [d:\agent\_work\1\s\src\vctools\crt\vcruntime\src\eh\amd64\longjmp.asm @ 140]
0c (Inline Function) --------`-------- VBoxVMM!iemOpcodeGetNextU8SlowJmp+0x95 [L:\vbox-intern\src\VBox\VMM\VMMAll\IEMAll.cpp @ 1155]
0d 00000041`cadfea50 00007ffb`e1b60f6b VBoxVMM!iemOpcodeGetNextU8Jmp+0xc1 [L:\vbox-intern\src\VBox\VMM\include\IEMInline.h @ 402]
0e 00000041`cadfea90 00007ffb`e1cc6201 VBoxVMM!IEMExecForExits+0xdb [L:\vbox-intern\src\VBox\VMM\VMMAll\IEMAll.cpp @ 10185]
0f 00000041`cadfec70 00007ffb`e1d0df8d VBoxVMM!EMHistoryExec+0x4f1 [L:\vbox-intern\src\VBox\VMM\VMMAll\EMAll.cpp @ 452]
10 00000041`cadfed60 00007ffb`e1d0d4c0 VBoxVMM!nemR3WinHandleExitCpuId+0x79d [L:\vbox-intern\src\VBox\VMM\VMMAll\NEMAllNativeTemplate-win.cpp.h @ 1829]
 @endcode
 *
 * @see https://developercommunity.visualstudio.com/t/fragile-behavior-of-longjmp-called-from-noexcept-f/1532859
 */
#if defined(IEM_WITH_SETJMP) && (defined(_MSC_VER) || defined(IEM_WITH_THROW_CATCH))
# define IEM_NOEXCEPT_MAY_LONGJMP RT_NOEXCEPT_EX(false)
#else
# define IEM_NOEXCEPT_MAY_LONGJMP RT_NOEXCEPT
#endif
/* ASM-NOINC-END */

#define IEM_IMPLEMENTS_TASKSWITCH

/** @def IEM_WITH_3DNOW
 * Includes the 3DNow decoding. */
#if !defined(IEM_WITH_3DNOW) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */
# ifndef IEM_WITHOUT_3DNOW
#  define IEM_WITH_3DNOW
# endif
#endif

/** @def IEM_WITH_THREE_0F_38
 * Includes the three byte opcode map for instrs starting with 0x0f 0x38. */
#if !defined(IEM_WITH_THREE_0F_38) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */
# ifndef IEM_WITHOUT_THREE_0F_38
#  define IEM_WITH_THREE_0F_38
# endif
#endif

/** @def IEM_WITH_THREE_0F_3A
 * Includes the three byte opcode map for instrs starting with 0x0f 0x3a. */
#if !defined(IEM_WITH_THREE_0F_3A) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */
# ifndef IEM_WITHOUT_THREE_0F_3A
#  define IEM_WITH_THREE_0F_3A
# endif
#endif

/** @def IEM_WITH_VEX
 * Includes the VEX decoding. */
#if !defined(IEM_WITH_VEX) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */
# ifndef IEM_WITHOUT_VEX
#  define IEM_WITH_VEX
# endif
#endif

/** @def IEM_CFG_TARGET_CPU
 * The minimum target CPU for the IEM emulation (IEMTARGETCPU_XXX value).
 *
 * By default we allow this to be configured by the user via the
 * CPUM/GuestCpuName config string, but this comes at a slight cost during
 * decoding. So, for applications of this code where there is no need to
 * be dynamic wrt target CPU, just modify this define.
 */
#if !defined(IEM_CFG_TARGET_CPU) || defined(DOXYGEN_RUNNING)
# define IEM_CFG_TARGET_CPU IEMTARGETCPU_DYNAMIC
#endif

//#define IEM_WITH_CODE_TLB // - work in progress
//#define IEM_WITH_DATA_TLB // - work in progress


/** @def IEM_USE_UNALIGNED_DATA_ACCESS
 * Use unaligned accesses instead of elaborate byte assembly. */
#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) || defined(DOXYGEN_RUNNING) /*ASM-NOINC*/
# define IEM_USE_UNALIGNED_DATA_ACCESS
#endif /*ASM-NOINC*/

//#define IEM_LOG_MEMORY_WRITES



#ifndef RT_IN_ASSEMBLER /* ASM-NOINC-START - the rest of the file */

# if !defined(IEM_WITHOUT_INSTRUCTION_STATS) && !defined(DOXYGEN_RUNNING)
/** Instruction statistics. */
typedef struct IEMINSTRSTATS
{
#  define IEM_DO_INSTR_STAT(a_Name, a_szDesc) uint32_t a_Name;
#  include "IEMInstructionStatisticsTmpl.h"
#  undef IEM_DO_INSTR_STAT
} IEMINSTRSTATS;
# else
struct IEMINSTRSTATS;
typedef struct IEMINSTRSTATS IEMINSTRSTATS;
# endif
/** Pointer to IEM instruction statistics. */
typedef IEMINSTRSTATS *PIEMINSTRSTATS;


/** @name IEMTARGETCPU_EFL_BEHAVIOR_XXX - IEMCPU::aidxTargetCpuEflFlavour
 * @{ */
#define IEMTARGETCPU_EFL_BEHAVIOR_NATIVE    0   /**< Native x86 EFLAGS result; Intel EFLAGS when on non-x86 hosts. */
#define IEMTARGETCPU_EFL_BEHAVIOR_INTEL     1   /**< Intel EFLAGS result. */
#define IEMTARGETCPU_EFL_BEHAVIOR_AMD       2   /**< AMD EFLAGS result. */
#define IEMTARGETCPU_EFL_BEHAVIOR_RESERVED  3   /**< Reserved/dummy entry slot that's the same as 0. */
#define IEMTARGETCPU_EFL_BEHAVIOR_MASK      3   /**< For masking the index before use. */
/** Selects the right variant from a_aArray.
 * pVCpu is implicit in the caller context. */
#define IEMTARGETCPU_EFL_BEHAVIOR_SELECT(a_aArray) \
    (a_aArray[pVCpu->iem.s.aidxTargetCpuEflFlavour[1] & IEMTARGETCPU_EFL_BEHAVIOR_MASK])
/** Variation of IEMTARGETCPU_EFL_BEHAVIOR_SELECT for when no native worker can
 * be used because the host CPU does not support the operation. */
#define IEMTARGETCPU_EFL_BEHAVIOR_SELECT_NON_NATIVE(a_aArray) \
    (a_aArray[pVCpu->iem.s.aidxTargetCpuEflFlavour[0] & IEMTARGETCPU_EFL_BEHAVIOR_MASK])
/** Variation of IEMTARGETCPU_EFL_BEHAVIOR_SELECT for a two-dimensional
 * array paralleling IEMCPU::aidxTargetCpuEflFlavour and a single bit index
 * into the two.
 * @sa IEM_SELECT_HOST_OR_FALLBACK */
#if defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)
# define IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(a_aaArray, a_fNative) \
    (a_aaArray[a_fNative][pVCpu->iem.s.aidxTargetCpuEflFlavour[a_fNative] & IEMTARGETCPU_EFL_BEHAVIOR_MASK])
#else
# define IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX(a_aaArray, a_fNative) \
    (a_aaArray[0][pVCpu->iem.s.aidxTargetCpuEflFlavour[0] & IEMTARGETCPU_EFL_BEHAVIOR_MASK])
#endif
/** @} */

/**
 * Picks @a a_pfnNative or @a a_pfnFallback according to the host CPU feature
 * indicator given by @a a_fCpumFeatureMember (CPUMFEATURES member).
 *
 * On non-x86 hosts, this will shortcut to the fallback w/o checking the
 * indicator.
 *
 * @sa IEMTARGETCPU_EFL_BEHAVIOR_SELECT_EX
 */
#if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
# define IEM_SELECT_HOST_OR_FALLBACK(a_fCpumFeatureMember, a_pfnNative, a_pfnFallback) \
    (g_CpumHostFeatures.s.a_fCpumFeatureMember ? a_pfnNative : a_pfnFallback)
#else
# define IEM_SELECT_HOST_OR_FALLBACK(a_fCpumFeatureMember, a_pfnNative, a_pfnFallback) (a_pfnFallback)
#endif
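
/* Usage sketch (hypothetical worker table; the real tables live in the IEMAll*
 * source files): the index from aidxTargetCpuEflFlavour selects among the
 * native, Intel and AMD EFLAGS variants, with the reserved slot mirroring 0.
 * @code{.c}
 *  static PFNIEMAIMPLBINU32 const s_apfnAddU32[4] =
 *  { iemAImpl_add_u32, iemAImpl_add_u32_intel, iemAImpl_add_u32_amd, iemAImpl_add_u32 };
 *  PFNIEMAIMPLBINU32 const pfnAddU32 = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(s_apfnAddU32);
 * @endcode
 */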


/**
 * Branch types.
 */
typedef enum IEMBRANCH
{
    IEMBRANCH_JUMP = 1,
    IEMBRANCH_CALL,
    IEMBRANCH_TRAP,
    IEMBRANCH_SOFTWARE_INT,
    IEMBRANCH_HARDWARE_INT
} IEMBRANCH;
AssertCompileSize(IEMBRANCH, 4);


/**
 * INT instruction types.
 */
typedef enum IEMINT
{
    /** INT n instruction (opcode 0xcd imm). */
    IEMINT_INTN = 0,
    /** Single byte INT3 instruction (opcode 0xcc). */
    IEMINT_INT3 = IEM_XCPT_FLAGS_BP_INSTR,
    /** Single byte INTO instruction (opcode 0xce). */
    IEMINT_INTO = IEM_XCPT_FLAGS_OF_INSTR,
    /** Single byte INT1 (ICEBP) instruction (opcode 0xf1). */
    IEMINT_INT1 = IEM_XCPT_FLAGS_ICEBP_INSTR
} IEMINT;
AssertCompileSize(IEMINT, 4);


/**
 * An FPU result.
 */
typedef struct IEMFPURESULT
{
    /** The output value. */
    RTFLOAT80U r80Result;
    /** The output status. */
    uint16_t FSW;
} IEMFPURESULT;
AssertCompileMemberOffset(IEMFPURESULT, FSW, 10);
/** Pointer to an FPU result. */
typedef IEMFPURESULT *PIEMFPURESULT;
/** Pointer to a const FPU result. */
typedef IEMFPURESULT const *PCIEMFPURESULT;


/**
 * An FPU result consisting of two output values and FSW.
 */
typedef struct IEMFPURESULTTWO
{
    /** The first output value. */
    RTFLOAT80U r80Result1;
    /** The output status. */
    uint16_t FSW;
    /** The second output value. */
    RTFLOAT80U r80Result2;
} IEMFPURESULTTWO;
AssertCompileMemberOffset(IEMFPURESULTTWO, FSW, 10);
AssertCompileMemberOffset(IEMFPURESULTTWO, r80Result2, 12);
/** Pointer to an FPU result consisting of two output values and FSW. */
typedef IEMFPURESULTTWO *PIEMFPURESULTTWO;
/** Pointer to a const FPU result consisting of two output values and FSW. */
typedef IEMFPURESULTTWO const *PCIEMFPURESULTTWO;


/**
 * IEM TLB entry.
 *
 * Lookup assembly:
 * @code{.asm}
        ; Calculate tag.
        mov     rax, [VA]
        shl     rax, 16
        shr     rax, 16 + X86_PAGE_SHIFT
        or      rax, [uTlbRevision]

        ; Do indexing.
        movzx   ecx, al
        lea     rcx, [pTlbEntries + rcx]

        ; Check tag.
        cmp     [rcx + IEMTLBENTRY.uTag], rax
        jne     .TlbMiss

        ; Check access.
        mov     rax, ACCESS_FLAGS | MAPPING_R3_NOT_VALID | 0xffffff00
        and     rax, [rcx + IEMTLBENTRY.fFlagsAndPhysRev]
        cmp     rax, [uTlbPhysRev]
        jne     .TlbMiss

        ; Calc address and we're done.
        mov     eax, X86_PAGE_OFFSET_MASK
        and     eax, [VA]
        or      rax, [rcx + IEMTLBENTRY.pbMappingR3]
 %ifdef VBOX_WITH_STATISTICS
        inc     qword [cTlbHits]
 %endif
        jmp     .Done

 .TlbMiss:
        mov     r8d, ACCESS_FLAGS
        mov     rdx, [VA]
        mov     rcx, [pVCpu]
        call    iemTlbTypeMiss
 .Done:

 @endcode
 *
 */
typedef struct IEMTLBENTRY
{
    /** The TLB entry tag.
     * Bits 35 thru 0 are made up of the virtual address shifted right 12 bits, this
     * is ASSUMING a virtual address width of 48 bits.
     *
     * Bits 63 thru 36 are made up of the TLB revision (zero means invalid).
     *
     * The TLB lookup code uses the current TLB revision, which won't ever be zero,
     * enabling an extremely cheap TLB invalidation most of the time. When the TLB
     * revision wraps around though, the tags need to be zeroed.
     *
     * @note Try use SHRD instruction? After seeing
     *       https://gmplib.org/~tege/x86-timing.pdf, maybe not.
     *
     * @todo This will need to be reorganized for 57-bit wide virtual address and
     *       PCID (currently 12 bits) and ASID (currently 6 bits) support. We'll
     *       have to move the TLB entry versioning entirely to the
     *       fFlagsAndPhysRev member then, as 57-bit wide VAs means we'll only have
     *       19 bits left (64 - 57 + 12 = 19) and they'll almost entirely be
     *       consumed by PCID and ASID (12 + 6 = 18).
     */
    uint64_t uTag;
    /** Access flags and physical TLB revision.
     *
     * - Bit  0 - page tables   - not executable (X86_PTE_PAE_NX).
     * - Bit  1 - page tables   - not writable (complemented X86_PTE_RW).
     * - Bit  2 - page tables   - not user (complemented X86_PTE_US).
     * - Bit  3 - pgm phys/virt - not directly writable.
     * - Bit  4 - pgm phys page - not directly readable.
     * - Bit  5 - page tables   - not accessed (complemented X86_PTE_A).
     * - Bit  6 - page tables   - not dirty (complemented X86_PTE_D).
     * - Bit  7 - page tables   - large page (for flushing).
     * - Bit  8 - tlb entry     - pbMappingR3 member not valid.
     * - Bit  9 - phys page     - unassigned memory.
     * - Bit 10 - phys page     - code page.
     * - Bits 63 thru 11 are used for the physical TLB revision number.
     *
     * We're using complemented bit meanings here because it makes it easy to check
     * whether special action is required. For instance a user mode write access
     * would do a "TEST fFlags, (X86_PTE_RW | X86_PTE_US | X86_PTE_D)" and a
     * non-zero result would mean special handling needed because either it wasn't
     * writable, or it wasn't user, or the page wasn't dirty. A user mode read
     * access would do "TEST fFlags, X86_PTE_US"; and a kernel mode read wouldn't
     * need to check any PTE flag.
     */
    uint64_t fFlagsAndPhysRev;
    /** The guest physical page address. */
    uint64_t GCPhys;
    /** Pointer to the ring-3 mapping. */
    R3PTRTYPE(uint8_t *) pbMappingR3;
#if HC_ARCH_BITS == 32
    uint32_t u32Padding1;
#endif
} IEMTLBENTRY;
AssertCompileSize(IEMTLBENTRY, 32);
/** Pointer to an IEM TLB entry. */
typedef IEMTLBENTRY *PIEMTLBENTRY;
/** Pointer to a const IEM TLB entry. */
typedef IEMTLBENTRY const *PCIEMTLBENTRY;
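
/* C sketch of the lookup shown as assembly in the IEMTLBENTRY docs above
 * (illustrative only; GCPtr, fAccessFlags, pbMem and the miss helper are
 * assumptions - the real lookup code lives in IEMAll.cpp and
 * IEMAllMemRWTmplInline.cpp.h):
 * @code{.c}
 *  uint64_t const     uTag  = IEMTLB_CALC_TAG_NO_REV(GCPtr) | pTlb->uTlbRevision;
 *  PIEMTLBENTRY const pTlbe = IEMTLB_TAG_TO_EVEN_ENTRY(pTlb, uTag);
 *  if (   pTlbe->uTag == uTag
 *      &&    (pTlbe->fFlagsAndPhysRev & (fAccessFlags | IEMTLBE_F_NO_MAPPINGR3 | IEMTLBE_F_PHYS_REV))
 *         == pTlb->uTlbPhysRev)
 *      pbMem = &pTlbe->pbMappingR3[GCPtr & GUEST_PAGE_OFFSET_MASK];    // hit
 *  else
 *      rcStrict = iemTlbMissSlowPath(pVCpu, GCPtr, fAccessFlags);      // hypothetical miss path
 * @endcode
 */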

/** @name IEMTLBE_F_XXX - TLB entry flags (IEMTLBENTRY::fFlagsAndPhysRev)
 * @{ */
#define IEMTLBE_F_PT_NO_EXEC        RT_BIT_64(0)  /**< Page tables: Not executable. */
#define IEMTLBE_F_PT_NO_WRITE       RT_BIT_64(1)  /**< Page tables: Not writable. */
#define IEMTLBE_F_PT_NO_USER        RT_BIT_64(2)  /**< Page tables: Not user accessible (supervisor only). */
#define IEMTLBE_F_PG_NO_WRITE       RT_BIT_64(3)  /**< Phys page: Not writable (access handler, ROM, whatever). */
#define IEMTLBE_F_PG_NO_READ        RT_BIT_64(4)  /**< Phys page: Not readable (MMIO / access handler, ROM). */
#define IEMTLBE_F_PT_NO_ACCESSED    RT_BIT_64(5)  /**< Page tables: Not accessed (need to be marked accessed). */
#define IEMTLBE_F_PT_NO_DIRTY       RT_BIT_64(6)  /**< Page tables: Not dirty (needs to be made dirty on write). */
#define IEMTLBE_F_PT_LARGE_PAGE     RT_BIT_64(7)  /**< Page tables: Large 2 or 4 MiB page (for flushing). */
#define IEMTLBE_F_NO_MAPPINGR3      RT_BIT_64(8)  /**< TLB entry: The IEMTLBENTRY::pbMappingR3 member is invalid. */
#define IEMTLBE_F_PG_UNASSIGNED     RT_BIT_64(9)  /**< Phys page: Unassigned memory (not RAM, ROM, MMIO2 or MMIO). */
#define IEMTLBE_F_PG_CODE_PAGE      RT_BIT_64(10) /**< Phys page: Code page. */
#define IEMTLBE_F_PHYS_REV          UINT64_C(0xfffffffffffff800) /**< Physical revision mask. @sa IEMTLB_PHYS_REV_INCR */
/** @} */
AssertCompile(PGMIEMGCPHYS2PTR_F_NO_WRITE     == IEMTLBE_F_PG_NO_WRITE);
AssertCompile(PGMIEMGCPHYS2PTR_F_NO_READ      == IEMTLBE_F_PG_NO_READ);
AssertCompile(PGMIEMGCPHYS2PTR_F_NO_MAPPINGR3 == IEMTLBE_F_NO_MAPPINGR3);
AssertCompile(PGMIEMGCPHYS2PTR_F_UNASSIGNED   == IEMTLBE_F_PG_UNASSIGNED);
AssertCompile(PGMIEMGCPHYS2PTR_F_CODE_PAGE    == IEMTLBE_F_PG_CODE_PAGE);
AssertCompile(PGM_WALKINFO_BIG_PAGE           == IEMTLBE_F_PT_LARGE_PAGE);
/** The bits set by PGMPhysIemGCPhys2PtrNoLock. */
#define IEMTLBE_GCPHYS2PTR_MASK (  PGMIEMGCPHYS2PTR_F_NO_WRITE \
                                 | PGMIEMGCPHYS2PTR_F_NO_READ \
                                 | PGMIEMGCPHYS2PTR_F_NO_MAPPINGR3 \
                                 | PGMIEMGCPHYS2PTR_F_UNASSIGNED \
                                 | PGMIEMGCPHYS2PTR_F_CODE_PAGE \
                                 | IEMTLBE_F_PHYS_REV )


/** The TLB size (power of two).
 * We initially chose 256 because that way we can obtain the result directly
 * from an 8-bit register without an additional AND instruction.
 * See also @bugref{10687}. */
#if defined(RT_ARCH_AMD64)
# define IEMTLB_ENTRY_COUNT                     256
# define IEMTLB_ENTRY_COUNT_AS_POWER_OF_TWO     8
#else
# define IEMTLB_ENTRY_COUNT                     8192
# define IEMTLB_ENTRY_COUNT_AS_POWER_OF_TWO     13
#endif
AssertCompile(RT_BIT_32(IEMTLB_ENTRY_COUNT_AS_POWER_OF_TWO) == IEMTLB_ENTRY_COUNT);

/** TLB slot format spec (assumes uint32_t or unsigned value). */
#if IEMTLB_ENTRY_COUNT <= 0x100 / 2
# define IEMTLB_SLOT_FMT "%02x"
#elif IEMTLB_ENTRY_COUNT <= 0x1000 / 2
# define IEMTLB_SLOT_FMT "%03x"
#elif IEMTLB_ENTRY_COUNT <= 0x10000 / 2
# define IEMTLB_SLOT_FMT "%04x"
#else
# define IEMTLB_SLOT_FMT "%05x"
#endif


/**
 * An IEM TLB.
 *
 * We've got two of these, one for data and one for instructions.
 */
typedef struct IEMTLB
{
    /** The non-global TLB revision.
     * This is actually only 28 bits wide (see IEMTLBENTRY::uTag) and is incremented
     * by adding RT_BIT_64(36) to it. When it wraps around and becomes zero, all
     * the tags in the TLB must be zeroed and the revision set to RT_BIT_64(36).
     * (The revision zero indicates an invalid TLB entry.)
     *
     * The initial value is chosen to cause an early wraparound. */
    uint64_t uTlbRevision;
    /** The TLB physical address revision - shadow of PGM variable.
     *
     * This is actually only 53 bits wide (see IEMTLBENTRY::fFlagsAndPhysRev) and is
     * incremented by adding RT_BIT_64(11) (IEMTLB_PHYS_REV_INCR). When it wraps
     * around and becomes zero, a rendezvous is called and each CPU wipes the
     * IEMTLBENTRY::pbMappingR3 as well as IEMTLBENTRY::fFlagsAndPhysRev bits 63
     * thru 8, 4, and 3.
     *
     * The initial value is chosen to cause an early wraparound.
     *
     * @note This is placed between the two TLB revisions because we
     *       load it in pair with one or the other on arm64. */
    uint64_t volatile uTlbPhysRev;
    /** The global TLB revision.
     * Same as uTlbRevision, but only increased for global flushes. */
    uint64_t uTlbRevisionGlobal;

    /** Large page tag range.
     *
     * This is used to avoid scanning a large page's worth of TLB entries for each
     * INVLPG instruction, only doing so if we've loaded any large pages and the
     * address is in this range. This is kept up to date when we load new TLB
     * entries.
     */
    struct LARGEPAGERANGE
    {
        /** The lowest large page address tag, UINT64_MAX if none. */
        uint64_t uFirstTag;
        /** The highest large page address tag (with offset mask part set), 0 if none. */
        uint64_t uLastTag;
    }
    /** Large page range for non-global pages. */
    NonGlobalLargePageRange,
    /** Large page range for global pages. */
    GlobalLargePageRange;
    /** Number of non-global entries for large pages loaded since last TLB flush. */
    uint32_t cTlbNonGlobalLargePageCurLoads;
    /** Number of global entries for large pages loaded since last TLB flush. */
    uint32_t cTlbGlobalLargePageCurLoads;

    /* Statistics: */

    /** TLB hits in IEMAll.cpp code (IEM_WITH_TLB_STATISTICS only; both).
     * @note For the data TLB this is only used in iemMemMap and for direct (i.e.
     *       not via the safe read/write path) calls to iemMemMapJmp. */
    uint64_t cTlbCoreHits;
    /** Safe read/write TLB hits in iemMemMapJmp (IEM_WITH_TLB_STATISTICS
     * only; data tlb only). */
    uint64_t cTlbSafeHits;
    /** TLB hits in IEMAllMemRWTmplInline.cpp.h (data + IEM_WITH_TLB_STATISTICS only). */
    uint64_t cTlbInlineCodeHits;

    /** TLB misses in IEMAll.cpp code (both).
     * @note For the data TLB this is only used in iemMemMap and for direct (i.e.
     *       not via the safe read/write path) calls to iemMemMapJmp. So, for
     *       the data TLB this is more like 'other misses', while for the code
     *       TLB it is all misses. */
    uint64_t cTlbCoreMisses;
    /** Subset of cTlbCoreMisses that results in PTE.G=1 loads (odd entries). */
    uint64_t cTlbCoreGlobalLoads;
    /** Safe read/write TLB misses in iemMemMapJmp (so data only). */
    uint64_t cTlbSafeMisses;
    /** Subset of cTlbSafeMisses that results in PTE.G=1 loads (odd entries). */
    uint64_t cTlbSafeGlobalLoads;
    /** Safe read path taken (data only). */
    uint64_t cTlbSafeReadPath;
    /** Safe write path taken (data only). */
    uint64_t cTlbSafeWritePath;

    /** @name Details for native code TLB misses.
     * @note These counts are included in the above counters (cTlbSafeReadPath,
     *       cTlbSafeWritePath, cTlbInlineCodeHits).
     * @{ */
    /** TLB misses in native code due to tag mismatch. */
    STAMCOUNTER cTlbNativeMissTag;
    /** TLB misses in native code due to flags or physical revision mismatch. */
    STAMCOUNTER cTlbNativeMissFlagsAndPhysRev;
    /** TLB misses in native code due to misaligned access. */
    STAMCOUNTER cTlbNativeMissAlignment;
    /** TLB misses in native code due to cross page access. */
    uint32_t cTlbNativeMissCrossPage;
    /** TLB misses in native code due to non-canonical address. */
    uint32_t cTlbNativeMissNonCanonical;
    /** @} */

    /** Slow read path (code only). */
    uint32_t cTlbSlowCodeReadPath;

    /** Regular TLB flush count. */
    uint32_t cTlsFlushes;
    /** Global TLB flush count. */
    uint32_t cTlsGlobalFlushes;
    /** Revision rollovers. */
    uint32_t cTlbRevisionRollovers;
    /** Physical revision flushes. */
    uint32_t cTlbPhysRevFlushes;
    /** Physical revision rollovers. */
    uint32_t cTlbPhysRevRollovers;

    /*uint32_t au32Padding[2];*/

    /** The TLB entries.
     * Even entries are for PTE.G=0 and use uTlbRevision.
     * Odd entries are for PTE.G=1 and use uTlbRevisionGlobal. */
    IEMTLBENTRY aEntries[IEMTLB_ENTRY_COUNT * 2];
} IEMTLB;
AssertCompileSizeAlignment(IEMTLB, 64);
/** The width (in bits) of the address portion of the TLB tag. */
#define IEMTLB_TAG_ADDR_WIDTH   36
/** IEMTLB::uTlbRevision increment. */
#define IEMTLB_REVISION_INCR    RT_BIT_64(IEMTLB_TAG_ADDR_WIDTH)
/** IEMTLB::uTlbRevision mask. */
#define IEMTLB_REVISION_MASK    (~(RT_BIT_64(IEMTLB_TAG_ADDR_WIDTH) - 1))

/** IEMTLB::uTlbPhysRev increment.
 * @sa IEMTLBE_F_PHYS_REV */
#define IEMTLB_PHYS_REV_INCR    RT_BIT_64(11)
AssertCompile(IEMTLBE_F_PHYS_REV == ~(IEMTLB_PHYS_REV_INCR - 1U));
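
/* Sketch of the cheap invalidation the revision scheme enables (not the real
 * flush code): bumping the revision makes every existing uTag mismatch at
 * once, so only the rare wraparound has to touch the entries themselves.
 * @code{.c}
 *  pTlb->uTlbRevision += IEMTLB_REVISION_INCR;
 *  if (pTlb->uTlbRevision == 0)                // wrapped after 2^28 flushes
 *  {
 *      pTlb->uTlbRevision = IEMTLB_REVISION_INCR;
 *      for (unsigned i = 0; i < RT_ELEMENTS(pTlb->aEntries); i += 2)
 *          pTlb->aEntries[i].uTag = 0;         // zero the non-global tags
 *  }
 * @endcode
 */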

/**
 * Calculates the TLB tag for a virtual address but without TLB revision.
 * @returns Tag value for indexing and comparing with IEMTLB::uTag.
 * @param   a_GCPtr The virtual address. Must be RTGCPTR or same size or
 *                  the clearing of the top 16 bits won't work (if 32-bit
 *                  we'll end up with mostly zeros).
 */
#define IEMTLB_CALC_TAG_NO_REV(a_GCPtr)     ( (((a_GCPtr) << 16) >> (GUEST_PAGE_SHIFT + 16)) )
/**
 * Converts a TLB tag value into an even TLB index.
 * @returns Index into IEMTLB::aEntries.
 * @param   a_uTag  Value returned by IEMTLB_CALC_TAG.
 */
#if IEMTLB_ENTRY_COUNT == 256
# define IEMTLB_TAG_TO_EVEN_INDEX(a_uTag)   ( (uint8_t)(a_uTag) * 2U )
#else
# define IEMTLB_TAG_TO_EVEN_INDEX(a_uTag)   ( ((a_uTag) & (IEMTLB_ENTRY_COUNT - 1U)) * 2U )
AssertCompile(RT_IS_POWER_OF_TWO(IEMTLB_ENTRY_COUNT));
#endif
/**
 * Converts a TLB tag value into a pointer to the even TLB entry.
 * @returns Pointer into IEMTLB::aEntries corresponding to the tag.
 * @param   a_pTlb  The TLB.
 * @param   a_uTag  Value returned by IEMTLB_CALC_TAG or
 *                  IEMTLB_CALC_TAG_NO_REV.
 */
#define IEMTLB_TAG_TO_EVEN_ENTRY(a_pTlb, a_uTag)    ( &(a_pTlb)->aEntries[IEMTLB_TAG_TO_EVEN_INDEX(a_uTag)] )

/** Converts a GC address to an even TLB index. */
#define IEMTLB_ADDR_TO_EVEN_INDEX(a_GCPtr)  IEMTLB_TAG_TO_EVEN_INDEX(IEMTLB_CALC_TAG_NO_REV(a_GCPtr))
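
/* Worked example, assuming 48-bit VAs, a GUEST_PAGE_SHIFT of 12 and the AMD64
 * IEMTLB_ENTRY_COUNT of 256:
 * @code{.c}
 *  RTGCPTR  const GCPtr = UINT64_C(0x00007fff12345678);
 *  uint64_t const uTag  = IEMTLB_CALC_TAG_NO_REV(GCPtr);  // VA bits 47:12 -> 0x7fff12345
 *  unsigned const idx   = IEMTLB_TAG_TO_EVEN_INDEX(uTag); // low byte * 2: 0x45 * 2 = 0x8a
 * @endcode
 * The even slot 0x8a is the PTE.G=0 entry; the odd slot 0x8b holds the PTE.G=1
 * variant of the same address.
 */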


/** @def IEM_WITH_TLB_TRACE
 * Enables the TLB tracing.
 * Adjust buffer size in IEMR3Init. */
#if defined(DOXYGEN_RUNNING) || 0
# define IEM_WITH_TLB_TRACE
#endif

#ifdef IEM_WITH_TLB_TRACE

/** TLB trace entry types. */
typedef enum : uint8_t
{
    kIemTlbTraceType_Invalid,
    kIemTlbTraceType_InvlPg,
    kIemTlbTraceType_Flush,
    kIemTlbTraceType_FlushGlobal,
    kIemTlbTraceType_Load,
    kIemTlbTraceType_LoadGlobal,
    kIemTlbTraceType_Load_Cr0,
    kIemTlbTraceType_Load_Cr3,
    kIemTlbTraceType_Load_Cr4,
    kIemTlbTraceType_Load_Efer,
    kIemTlbTraceType_Irq,
    kIemTlbTraceType_Xcpt,
    kIemTlbTraceType_IRet
} IEMTLBTRACETYPE;

/** TLB trace entry. */
typedef struct IEMTLBTRACEENTRY
{
    /** The flattened RIP for the event. */
    uint64_t rip;
    /** The event type. */
    IEMTLBTRACETYPE enmType;
    /** Byte parameter - typically used as 'bool fDataTlb'. */
    uint8_t bParam;
    /** 16-bit parameter value. */
    uint16_t u16Param;
    /** 32-bit parameter value. */
    uint32_t u32Param;
    /** First 64-bit parameter value. */
    uint64_t u64Param;
    /** Second 64-bit parameter value. */
    uint64_t u64Param2;
} IEMTLBTRACEENTRY;
AssertCompileSize(IEMTLBTRACEENTRY, 32);
/** Pointer to a TLB trace entry. */
typedef IEMTLBTRACEENTRY *PIEMTLBTRACEENTRY;
/** Pointer to a const TLB trace entry. */
typedef IEMTLBTRACEENTRY const *PCIEMTLBTRACEENTRY;
#endif /* IEM_WITH_TLB_TRACE */

#if defined(IEM_WITH_TLB_TRACE) && defined(IN_RING3)
# define IEMTLBTRACE_INVLPG(a_pVCpu, a_GCPtr)                   iemTlbTrace(a_pVCpu, kIemTlbTraceType_InvlPg, a_GCPtr)
# define IEMTLBTRACE_FLUSH(a_pVCpu, a_uRev, a_fDataTlb)         iemTlbTrace(a_pVCpu, kIemTlbTraceType_Flush, a_uRev, 0, a_fDataTlb)
# define IEMTLBTRACE_FLUSH_GLOBAL(a_pVCpu, a_uRev, a_uGRev, a_fDataTlb) \
    iemTlbTrace(a_pVCpu, kIemTlbTraceType_FlushGlobal, a_uRev, a_uGRev, a_fDataTlb)
# define IEMTLBTRACE_LOAD(a_pVCpu, a_GCPtr, a_fDataTlb)         iemTlbTrace(a_pVCpu, kIemTlbTraceType_Load, a_GCPtr, 0, a_fDataTlb)
# define IEMTLBTRACE_LOAD_GLOBAL(a_pVCpu, a_GCPtr, a_fDataTlb) \
    iemTlbTrace(a_pVCpu, kIemTlbTraceType_LoadGlobal, a_GCPtr, 0, a_fDataTlb)
# define IEMTLBTRACE_LOAD_CR0(a_pVCpu, a_uNew, a_uOld)          iemTlbTrace(a_pVCpu, kIemTlbTraceType_Load_Cr0, a_uNew, a_uOld)
# define IEMTLBTRACE_LOAD_CR3(a_pVCpu, a_uNew, a_uOld)          iemTlbTrace(a_pVCpu, kIemTlbTraceType_Load_Cr3, a_uNew, a_uOld)
# define IEMTLBTRACE_LOAD_CR4(a_pVCpu, a_uNew, a_uOld)          iemTlbTrace(a_pVCpu, kIemTlbTraceType_Load_Cr4, a_uNew, a_uOld)
# define IEMTLBTRACE_LOAD_EFER(a_pVCpu, a_uNew, a_uOld)         iemTlbTrace(a_pVCpu, kIemTlbTraceType_Load_Efer, a_uNew, a_uOld)
# define IEMTLBTRACE_IRQ(a_pVCpu, a_uVector, a_fFlags, a_fEFlags) \
    iemTlbTrace(a_pVCpu, kIemTlbTraceType_Irq, a_fEFlags, 0, a_uVector, a_fFlags)
# define IEMTLBTRACE_XCPT(a_pVCpu, a_uVector, a_uErr, a_uCr2, a_fFlags) \
    iemTlbTrace(a_pVCpu, kIemTlbTraceType_Xcpt, a_uErr, a_uCr2, a_uVector, a_fFlags)
# define IEMTLBTRACE_IRET(a_pVCpu, a_uRetCs, a_uRetRip, a_fEFlags) \
    iemTlbTrace(a_pVCpu, kIemTlbTraceType_IRet, a_uRetRip, a_fEFlags, 0, a_uRetCs)
#else
# define IEMTLBTRACE_INVLPG(a_pVCpu, a_GCPtr)                           do { } while (0)
# define IEMTLBTRACE_FLUSH(a_pVCpu, a_uRev, a_fDataTlb)                 do { } while (0)
# define IEMTLBTRACE_FLUSH_GLOBAL(a_pVCpu, a_uRev, a_uGRev, a_fDataTlb) do { } while (0)
# define IEMTLBTRACE_LOAD(a_pVCpu, a_GCPtr, a_fDataTlb)                 do { } while (0)
# define IEMTLBTRACE_LOAD_GLOBAL(a_pVCpu, a_GCPtr, a_fDataTlb)          do { } while (0)
# define IEMTLBTRACE_LOAD_CR0(a_pVCpu, a_uNew, a_uOld)                  do { } while (0)
# define IEMTLBTRACE_LOAD_CR3(a_pVCpu, a_uNew, a_uOld)                  do { } while (0)
# define IEMTLBTRACE_LOAD_CR4(a_pVCpu, a_uNew, a_uOld)                  do { } while (0)
# define IEMTLBTRACE_LOAD_EFER(a_pVCpu, a_uNew, a_uOld)                 do { } while (0)
# define IEMTLBTRACE_IRQ(a_pVCpu, a_uVector, a_fFlags, a_fEFlags)       do { } while (0)
# define IEMTLBTRACE_XCPT(a_pVCpu, a_uVector, a_uErr, a_uCr2, a_fFlags) do { } while (0)
# define IEMTLBTRACE_IRET(a_pVCpu, a_uRetCs, a_uRetRip, a_fEFlags)      do { } while (0)
#endif


/** @name IEM_MC_F_XXX - MC block flags/clues.
 * @todo Merge with IEM_CIMPL_F_XXX
 * @{ */
#define IEM_MC_F_ONLY_8086          RT_BIT_32(0)
#define IEM_MC_F_MIN_186            RT_BIT_32(1)
#define IEM_MC_F_MIN_286            RT_BIT_32(2)
#define IEM_MC_F_NOT_286_OR_OLDER   IEM_MC_F_MIN_386
#define IEM_MC_F_MIN_386            RT_BIT_32(3)
#define IEM_MC_F_MIN_486            RT_BIT_32(4)
#define IEM_MC_F_MIN_PENTIUM        RT_BIT_32(5)
#define IEM_MC_F_MIN_PENTIUM_II     IEM_MC_F_MIN_PENTIUM
#define IEM_MC_F_MIN_CORE           IEM_MC_F_MIN_PENTIUM
#define IEM_MC_F_64BIT              RT_BIT_32(6)
#define IEM_MC_F_NOT_64BIT          RT_BIT_32(7)
/** This is set by IEMAllN8vePython.py to indicate a variation without the
 * flags-clearing-and-checking, when there is also a variation with that.
 * @note Do not use this manually, it's only for python and for testing in
 *       the native recompiler! */
#define IEM_MC_F_WITHOUT_FLAGS      RT_BIT_32(8)
/** @} */

/** @name IEM_CIMPL_F_XXX - State change clues for CIMPL calls.
 *
 * These clues are mainly for the recompiler, so that it can emit correct code.
 *
 * They are processed by the python script, which also automatically
 * calculates flags for MC blocks based on the statements, extending the use of
 * these flags to describe MC block behavior to the recompiler core. The python
 * script passes the flags to the IEM_MC2_END_EMIT_CALLS macro, but mainly for
 * error checking purposes. The script emits the necessary fEndTb = true and
 * similar statements as this reduces compile time a tiny bit.
 *
 * @{ */
/** Flag set if direct branch, clear if absolute or indirect. */
#define IEM_CIMPL_F_BRANCH_DIRECT           RT_BIT_32(0)
/** Flag set if indirect branch, clear if direct or relative.
 * This is also used for all system control transfers (SYSCALL, SYSRET, INT, ++)
 * as well as for return instructions (RET, IRET, RETF). */
#define IEM_CIMPL_F_BRANCH_INDIRECT         RT_BIT_32(1)
/** Flag set if relative branch, clear if absolute or indirect. */
#define IEM_CIMPL_F_BRANCH_RELATIVE         RT_BIT_32(2)
/** Flag set if conditional branch, clear if unconditional. */
#define IEM_CIMPL_F_BRANCH_CONDITIONAL      RT_BIT_32(3)
/** Flag set if it's a far branch (changes CS). */
#define IEM_CIMPL_F_BRANCH_FAR              RT_BIT_32(4)
/** Convenience: Testing any kind of branch. */
#define IEM_CIMPL_F_BRANCH_ANY              (IEM_CIMPL_F_BRANCH_DIRECT | IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_RELATIVE)

/** Execution flags may change (IEMCPU::fExec). */
#define IEM_CIMPL_F_MODE                    RT_BIT_32(5)
/** May change significant portions of RFLAGS. */
#define IEM_CIMPL_F_RFLAGS                  RT_BIT_32(6)
/** May change the status bits (X86_EFL_STATUS_BITS) in RFLAGS. */
#define IEM_CIMPL_F_STATUS_FLAGS            RT_BIT_32(7)
/** May trigger interrupt shadowing. */
#define IEM_CIMPL_F_INHIBIT_SHADOW          RT_BIT_32(8)
/** May enable interrupts, so recheck IRQ immediately after executing
 * the instruction. */
#define IEM_CIMPL_F_CHECK_IRQ_AFTER         RT_BIT_32(9)
/** May disable interrupts, so recheck IRQ immediately before executing the
 * instruction. */
#define IEM_CIMPL_F_CHECK_IRQ_BEFORE        RT_BIT_32(10)
/** Convenience: Check for IRQ both before and after an instruction. */
#define IEM_CIMPL_F_CHECK_IRQ_BEFORE_AND_AFTER (IEM_CIMPL_F_CHECK_IRQ_BEFORE | IEM_CIMPL_F_CHECK_IRQ_AFTER)
/** May trigger a VM exit (treated like IEM_CIMPL_F_MODE atm). */
#define IEM_CIMPL_F_VMEXIT                  RT_BIT_32(11)
/** May modify FPU state.
 * @todo Not sure if this is useful yet. */
#define IEM_CIMPL_F_FPU                     RT_BIT_32(12)
/** REP prefixed instruction which may yield before updating PC.
 * @todo Not sure if this is useful, REP functions now return non-zero
 *       status if they don't update the PC. */
#define IEM_CIMPL_F_REP                     RT_BIT_32(13)
/** I/O instruction.
 * @todo Not sure if this is useful yet. */
#define IEM_CIMPL_F_IO                      RT_BIT_32(14)
/** Force end of TB after the instruction. */
#define IEM_CIMPL_F_END_TB                  RT_BIT_32(15)
/** Flag set if a branch may also modify the stack (push/pop return address). */
#define IEM_CIMPL_F_BRANCH_STACK            RT_BIT_32(16)
/** Flag set if a branch may also modify the stack (push/pop return address)
 * and switch it (load/restore SS:RSP). */
#define IEM_CIMPL_F_BRANCH_STACK_FAR        RT_BIT_32(17)
/** Convenience: Raise exception (technically unnecessary, since it shouldn't return VINF_SUCCESS). */
#define IEM_CIMPL_F_XCPT \
    (  IEM_CIMPL_F_BRANCH_INDIRECT | IEM_CIMPL_F_BRANCH_FAR | IEM_CIMPL_F_BRANCH_STACK_FAR \
     | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT)

/** The block calls a C-implementation instruction function with two implicit arguments.
 * Mutually exclusive with IEM_CIMPL_F_CALLS_AIMPL and
 * IEM_CIMPL_F_CALLS_AIMPL_WITH_FXSTATE.
 * @note The python scripts will add this if missing. */
#define IEM_CIMPL_F_CALLS_CIMPL             RT_BIT_32(18)
/** The block calls an ASM-implementation instruction function.
 * Mutually exclusive with IEM_CIMPL_F_CALLS_CIMPL and
 * IEM_CIMPL_F_CALLS_AIMPL_WITH_FXSTATE.
 * @note The python scripts will add this if missing. */
#define IEM_CIMPL_F_CALLS_AIMPL             RT_BIT_32(19)
/** The block calls an ASM-implementation instruction function with an implicit
 * X86FXSTATE pointer argument.
 * Mutually exclusive with IEM_CIMPL_F_CALLS_CIMPL, IEM_CIMPL_F_CALLS_AIMPL and
 * IEM_CIMPL_F_CALLS_AIMPL_WITH_XSTATE.
 * @note The python scripts will add this if missing. */
#define IEM_CIMPL_F_CALLS_AIMPL_WITH_FXSTATE RT_BIT_32(20)
/** The block calls an ASM-implementation instruction function with an implicit
 * X86XSAVEAREA pointer argument.
 * Mutually exclusive with IEM_CIMPL_F_CALLS_CIMPL, IEM_CIMPL_F_CALLS_AIMPL and
 * IEM_CIMPL_F_CALLS_AIMPL_WITH_FXSTATE.
 * @note No different from IEM_CIMPL_F_CALLS_AIMPL_WITH_FXSTATE, so same value.
 * @note The python scripts will add this if missing. */
#define IEM_CIMPL_F_CALLS_AIMPL_WITH_XSTATE IEM_CIMPL_F_CALLS_AIMPL_WITH_FXSTATE
/** @} */


/** @name IEM_F_XXX - Execution mode flags (IEMCPU::fExec, IEMTB::fFlags).
 *
 * These flags are set when entering IEM and adjusted as code is executed, such
 * that they will always contain the current values as instructions are
 * finished.
 *
 * In recompiled execution mode, (most of) these flags are included in the
 * translation block selection key and stored in IEMTB::fFlags alongside the
 * IEMTB_F_XXX flags. The latter flags use bits 31 thru 24, which are all zero
 * in IEMCPU::fExec.
 *
 * @{ */
/** Mode: The block target mode mask. */
#define IEM_F_MODE_MASK                     UINT32_C(0x0000001f)
/** Mode: The IEMMODE part of the IEMTB_F_MODE_MASK value. */
#define IEM_F_MODE_CPUMODE_MASK             UINT32_C(0x00000003)
/** X86 Mode: Bit used to indicate a pre-386 CPU in 16-bit mode (for eliminating
 * the conditional in EIP/IP updating), and flat wide open CS, SS, DS, and ES in
 * 32-bit mode (for simplifying most memory accesses). */
#define IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK UINT32_C(0x00000004)
/** X86 Mode: Bit indicating protected mode, real mode (or SMM) when not set. */
#define IEM_F_MODE_X86_PROT_MASK            UINT32_C(0x00000008)
/** X86 Mode: Bit used to indicate virtual 8086 mode (only 16-bit). */
#define IEM_F_MODE_X86_V86_MASK             UINT32_C(0x00000010)

/** X86 Mode: 16-bit on 386 or later. */
#define IEM_F_MODE_X86_16BIT                UINT32_C(0x00000000)
/** X86 Mode: 80286, 80186 and 8086/88 targeting blocks (EIP update opt). */
#define IEM_F_MODE_X86_16BIT_PRE_386        UINT32_C(0x00000004)
/** X86 Mode: 16-bit protected mode on 386 or later. */
#define IEM_F_MODE_X86_16BIT_PROT           UINT32_C(0x00000008)
/** X86 Mode: 16-bit protected mode on a pre-386 CPU. */
#define IEM_F_MODE_X86_16BIT_PROT_PRE_386   UINT32_C(0x0000000c)
/** X86 Mode: 16-bit virtual 8086 protected mode (on 386 or later). */
#define IEM_F_MODE_X86_16BIT_PROT_V86       UINT32_C(0x00000018)

/** X86 Mode: 32-bit on 386 or later. */
#define IEM_F_MODE_X86_32BIT                UINT32_C(0x00000001)
/** X86 Mode: 32-bit mode with wide open flat CS, SS, DS and ES. */
#define IEM_F_MODE_X86_32BIT_FLAT           UINT32_C(0x00000005)
/** X86 Mode: 32-bit protected mode. */
#define IEM_F_MODE_X86_32BIT_PROT           UINT32_C(0x00000009)
/** X86 Mode: 32-bit protected mode with wide open flat CS, SS, DS and ES. */
#define IEM_F_MODE_X86_32BIT_PROT_FLAT      UINT32_C(0x0000000d)

/** X86 Mode: 64-bit (includes protected, but not the flat bit). */
#define IEM_F_MODE_X86_64BIT                UINT32_C(0x0000000a)

/** X86 Mode: Checks if @a a_fExec represents a FLAT mode. */
#define IEM_F_MODE_X86_IS_FLAT(a_fExec)     (   ((a_fExec) & IEM_F_MODE_MASK) == IEM_F_MODE_X86_64BIT \
                                             || ((a_fExec) & IEM_F_MODE_MASK) == IEM_F_MODE_X86_32BIT_PROT_FLAT \
                                             || ((a_fExec) & IEM_F_MODE_MASK) == IEM_F_MODE_X86_32BIT_FLAT)

/** Bypass access handlers when set. */
#define IEM_F_BYPASS_HANDLERS               UINT32_C(0x00010000)
/** Have pending hardware instruction breakpoints. */
#define IEM_F_PENDING_BRK_INSTR             UINT32_C(0x00020000)
/** Have pending hardware data breakpoints. */
#define IEM_F_PENDING_BRK_DATA              UINT32_C(0x00040000)

/** X86: Have pending hardware I/O breakpoints. */
#define IEM_F_PENDING_BRK_X86_IO            UINT32_C(0x00000400)
/** X86: Disregard the lock prefix (implied or not) when set. */
#define IEM_F_X86_DISREGARD_LOCK            UINT32_C(0x00000800)

/** Pending breakpoint mask (what iemCalcExecDbgFlags works out). */
#define IEM_F_PENDING_BRK_MASK              (IEM_F_PENDING_BRK_INSTR | IEM_F_PENDING_BRK_DATA | IEM_F_PENDING_BRK_X86_IO)

/** Caller configurable options. */
#define IEM_F_USER_OPTS                     (IEM_F_BYPASS_HANDLERS | IEM_F_X86_DISREGARD_LOCK)

/** X86: The current protection level (CPL) shift factor. */
#define IEM_F_X86_CPL_SHIFT                 8
/** X86: The current protection level (CPL) mask. */
#define IEM_F_X86_CPL_MASK                  UINT32_C(0x00000300)
/** X86: The current protection level (CPL) shifted mask. */
#define IEM_F_X86_CPL_SMASK                 UINT32_C(0x00000003)
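
/* Sketch: extracting the CPL from fExec with the above (the real code uses a
 * dedicated getter; this just illustrates the shift/mask pair):
 * @code{.c}
 *  uint8_t const uCpl = (uint8_t)((pVCpu->iem.s.fExec & IEM_F_X86_CPL_MASK) >> IEM_F_X86_CPL_SHIFT);
 *  Assert(uCpl == (uCpl & IEM_F_X86_CPL_SMASK));
 * @endcode
 */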

/** X86: Alignment checks enabled (CR0.AM=1 & EFLAGS.AC=1). */
#define IEM_F_X86_AC                        UINT32_C(0x00080000)

/** X86 execution context.
 * The IEM_F_X86_CTX_XXX values are individual flags that can be combined (with
 * the exception of IEM_F_X86_CTX_NORMAL). This allows running VMs from SMM
 * mode. */
#define IEM_F_X86_CTX_MASK                  UINT32_C(0x0000f000)
/** X86 context: Plain regular execution context. */
#define IEM_F_X86_CTX_NORMAL                UINT32_C(0x00000000)
/** X86 context: VT-x enabled. */
#define IEM_F_X86_CTX_VMX                   UINT32_C(0x00001000)
/** X86 context: AMD-V enabled. */
#define IEM_F_X86_CTX_SVM                   UINT32_C(0x00002000)
/** X86 context: In AMD-V or VT-x guest mode. */
#define IEM_F_X86_CTX_IN_GUEST              UINT32_C(0x00004000)
/** X86 context: System management mode (SMM). */
#define IEM_F_X86_CTX_SMM                   UINT32_C(0x00008000)

/** @todo Add TF+RF+INHIBIT indicator(s), so we can eliminate the conditional in
 *        iemRegFinishClearingRF() for most situations (CPUMCTX_DBG_HIT_DRX_MASK
 *        and CPUMCTX_DBG_DBGF_MASK are covered by the IEM_F_PENDING_BRK_XXX bits
 *        already). */

/** @} */


/** @name IEMTB_F_XXX - Translation block flags (IEMTB::fFlags).
 *
 * Extends the IEM_F_XXX flags (subject to IEMTB_F_IEM_F_MASK) to make up the
 * translation block flags. The combined flag mask (subject to
 * IEMTB_F_KEY_MASK) is used as part of the lookup key for translation blocks.
 *
 * @{ */
/** Mask of IEM_F_XXX flags included in IEMTB_F_XXX. */
#define IEMTB_F_IEM_F_MASK      UINT32_C(0x00ffffff)

/** Type: The block type mask. */
#define IEMTB_F_TYPE_MASK       UINT32_C(0x03000000)
/** Type: Purely threaded recompiler (via tables). */
#define IEMTB_F_TYPE_THREADED   UINT32_C(0x01000000)
/** Type: Native recompilation. */
#define IEMTB_F_TYPE_NATIVE     UINT32_C(0x02000000)

/** Set when we're starting the block in an "interrupt shadow".
 * We don't need to distinguish between the two types of this mask, thus the one.
 * @see CPUMCTX_INHIBIT_SHADOW, CPUMIsInInterruptShadow() */
#define IEMTB_F_INHIBIT_SHADOW  UINT32_C(0x04000000)
/** Set when we're currently inhibiting NMIs.
 * @see CPUMCTX_INHIBIT_NMI, CPUMAreInterruptsInhibitedByNmi() */
#define IEMTB_F_INHIBIT_NMI     UINT32_C(0x08000000)

/** Checks that EIP/IP is within CS.LIM before each instruction. Used when
 * we're close to the limit before starting a TB, as determined by
 * iemGetTbFlagsForCurrentPc(). */
#define IEMTB_F_CS_LIM_CHECKS   UINT32_C(0x10000000)

/** Mask of the IEMTB_F_XXX flags that are part of the TB lookup key.
 *
 * @note We skip all of IEM_F_X86_CTX_MASK, with the exception of SMM (which we
 *       don't implement), because we don't currently generate any context
 *       specific code - that's all handled in CIMPL functions.
 *
 *       For the threaded recompiler we don't generate any CPL specific code
 *       either, but the native recompiler does for memory access (saves getting
 *       the CPL from fExec and turning it into IEMTLBE_F_PT_NO_USER).
 *       Since most OSes will not share code between rings, this shouldn't
 *       have any real effect on TB/memory/recompiling load.
 */
#define IEMTB_F_KEY_MASK        ((UINT32_MAX & ~(IEM_F_X86_CTX_MASK | IEMTB_F_TYPE_MASK)) | IEM_F_X86_CTX_SMM)
/** @} */

AssertCompile( (IEM_F_MODE_X86_16BIT & IEM_F_MODE_CPUMODE_MASK) == IEMMODE_16BIT);
AssertCompile(!(IEM_F_MODE_X86_16BIT & IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK));
AssertCompile(!(IEM_F_MODE_X86_16BIT & IEM_F_MODE_X86_PROT_MASK));
AssertCompile(!(IEM_F_MODE_X86_16BIT & IEM_F_MODE_X86_V86_MASK));
AssertCompile( (IEM_F_MODE_X86_16BIT_PRE_386 & IEM_F_MODE_CPUMODE_MASK) == IEMMODE_16BIT);
AssertCompile( IEM_F_MODE_X86_16BIT_PRE_386 & IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK);
AssertCompile(!(IEM_F_MODE_X86_16BIT_PRE_386 & IEM_F_MODE_X86_PROT_MASK));
AssertCompile(!(IEM_F_MODE_X86_16BIT_PRE_386 & IEM_F_MODE_X86_V86_MASK));
AssertCompile( (IEM_F_MODE_X86_16BIT_PROT & IEM_F_MODE_CPUMODE_MASK) == IEMMODE_16BIT);
AssertCompile(!(IEM_F_MODE_X86_16BIT_PROT & IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK));
AssertCompile( IEM_F_MODE_X86_16BIT_PROT & IEM_F_MODE_X86_PROT_MASK);
AssertCompile(!(IEM_F_MODE_X86_16BIT_PROT & IEM_F_MODE_X86_V86_MASK));
AssertCompile( (IEM_F_MODE_X86_16BIT_PROT_PRE_386 & IEM_F_MODE_CPUMODE_MASK) == IEMMODE_16BIT);
AssertCompile( IEM_F_MODE_X86_16BIT_PROT_PRE_386 & IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK);
AssertCompile( IEM_F_MODE_X86_16BIT_PROT_PRE_386 & IEM_F_MODE_X86_PROT_MASK);
AssertCompile(!(IEM_F_MODE_X86_16BIT_PROT_PRE_386 & IEM_F_MODE_X86_V86_MASK));
AssertCompile( IEM_F_MODE_X86_16BIT_PROT_V86 & IEM_F_MODE_X86_PROT_MASK);
AssertCompile(!(IEM_F_MODE_X86_16BIT_PROT_V86 & IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK));
AssertCompile( IEM_F_MODE_X86_16BIT_PROT_V86 & IEM_F_MODE_X86_V86_MASK);

AssertCompile( (IEM_F_MODE_X86_32BIT & IEM_F_MODE_CPUMODE_MASK) == IEMMODE_32BIT);
AssertCompile(!(IEM_F_MODE_X86_32BIT & IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK));
AssertCompile(!(IEM_F_MODE_X86_32BIT & IEM_F_MODE_X86_PROT_MASK));
AssertCompile( (IEM_F_MODE_X86_32BIT_FLAT & IEM_F_MODE_CPUMODE_MASK) == IEMMODE_32BIT);
AssertCompile( IEM_F_MODE_X86_32BIT_FLAT & IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK);
AssertCompile(!(IEM_F_MODE_X86_32BIT_FLAT & IEM_F_MODE_X86_PROT_MASK));
AssertCompile( (IEM_F_MODE_X86_32BIT_PROT & IEM_F_MODE_CPUMODE_MASK) == IEMMODE_32BIT);
AssertCompile(!(IEM_F_MODE_X86_32BIT_PROT & IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK));
AssertCompile( IEM_F_MODE_X86_32BIT_PROT & IEM_F_MODE_X86_PROT_MASK);
AssertCompile( (IEM_F_MODE_X86_32BIT_PROT_FLAT & IEM_F_MODE_CPUMODE_MASK) == IEMMODE_32BIT);
AssertCompile( IEM_F_MODE_X86_32BIT_PROT_FLAT & IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK);
AssertCompile( IEM_F_MODE_X86_32BIT_PROT_FLAT & IEM_F_MODE_X86_PROT_MASK);

AssertCompile( (IEM_F_MODE_X86_64BIT & IEM_F_MODE_CPUMODE_MASK) == IEMMODE_64BIT);
AssertCompile( IEM_F_MODE_X86_64BIT & IEM_F_MODE_X86_PROT_MASK);
AssertCompile(!(IEM_F_MODE_X86_64BIT & IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK));

/** Native instruction type for use with the native code generator.
 * This is a byte (uint8_t) for x86 and amd64 and uint32_t for the other(s). */
#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
typedef uint8_t IEMNATIVEINSTR;
#else
typedef uint32_t IEMNATIVEINSTR;
#endif
/** Pointer to a native instruction unit. */
typedef IEMNATIVEINSTR *PIEMNATIVEINSTR;
/** Pointer to a const native instruction unit. */
typedef IEMNATIVEINSTR const *PCIEMNATIVEINSTR;

/**
 * A call for the threaded call table.
 */
typedef struct IEMTHRDEDCALLENTRY
{
    /** The function to call (IEMTHREADEDFUNCS). */
    uint16_t enmFunction;

    /** Instruction number in the TB (for statistics). */
    uint8_t idxInstr;
    /** The opcode length. */
    uint8_t cbOpcode;
    /** Offset into IEMTB::pabOpcodes. */
    uint16_t offOpcode;

    /** TB lookup table index (7 bits) and large size (1 bit).
     *
     * The default size is 1 entry, but for indirect calls and returns we set the
     * top bit and allocate 4 (IEM_TB_LOOKUP_TAB_LARGE_SIZE) entries. The large
     * table uses RIP for selecting the entry to use, as it is assumed a hash table
     * lookup isn't that slow compared to sequentially trying out 4 TBs.
     *
     * By default lookup table entry 0 for a TB is reserved as a fallback for
     * calltable entries w/o explicit entries, so this member will be non-zero if
     * there is a lookup entry associated with this call.
     *
     * @sa IEM_TB_LOOKUP_TAB_GET_SIZE, IEM_TB_LOOKUP_TAB_GET_IDX
     */
    uint8_t uTbLookup;

    /** Unused atm. */
    uint8_t uUnused0;

    /** Generic parameters. */
    uint64_t auParams[3];
} IEMTHRDEDCALLENTRY;
AssertCompileSize(IEMTHRDEDCALLENTRY, sizeof(uint64_t) * 4);
/** Pointer to a threaded call entry. */
typedef struct IEMTHRDEDCALLENTRY *PIEMTHRDEDCALLENTRY;
/** Pointer to a const threaded call entry. */
typedef IEMTHRDEDCALLENTRY const *PCIEMTHRDEDCALLENTRY;

/** The number of TB lookup table entries for a large allocation
 * (IEMTHRDEDCALLENTRY::uTbLookup bit 7 set). */
#define IEM_TB_LOOKUP_TAB_LARGE_SIZE 4
/** Get the lookup table size from IEMTHRDEDCALLENTRY::uTbLookup. */
#define IEM_TB_LOOKUP_TAB_GET_SIZE(a_uTbLookup) (!((a_uTbLookup) & 0x80) ? 1 : IEM_TB_LOOKUP_TAB_LARGE_SIZE)
/** Get the first lookup table index from IEMTHRDEDCALLENTRY::uTbLookup. */
#define IEM_TB_LOOKUP_TAB_GET_IDX(a_uTbLookup)  ((a_uTbLookup) & 0x7f)
/** Get the lookup table index from IEMTHRDEDCALLENTRY::uTbLookup and RIP. */
#define IEM_TB_LOOKUP_TAB_GET_IDX_WITH_RIP(a_uTbLookup, a_Rip) \
    (!((a_uTbLookup) & 0x80) ? (a_uTbLookup) & 0x7f : ((a_uTbLookup) & 0x7f) + ((a_Rip) & (IEM_TB_LOOKUP_TAB_LARGE_SIZE - 1)) )

/** Make a IEMTHRDEDCALLENTRY::uTbLookup value. */
#define IEM_TB_LOOKUP_TAB_MAKE(a_idxTable, a_fLarge) ((a_idxTable) | ((a_fLarge) ? 0x80 : 0))
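
/* Example of the large-table index selection (illustrative values):
 * @code{.c}
 *  uint8_t const  uTbLookup = IEM_TB_LOOKUP_TAB_MAKE(4, true);    // 0x84: first index 4, large
 *  unsigned const idxEntry  = IEM_TB_LOOKUP_TAB_GET_IDX_WITH_RIP(uTbLookup, UINT64_C(0x401237));
 *  // idxEntry = 4 + (0x401237 & 3) = 7, i.e. the last of the four entries 4..7.
 * @endcode
 */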
1185
1186/**
1187 * Native IEM TB 'function' typedef.
1188 *
1189 * This will throw/longjmp on occation.
1190 *
1191 * @note AMD64 doesn't have that many non-volatile registers and does sport
1192 * 32-bit address displacments, so we don't need pCtx.
1193 *
1194 * On ARM64 pCtx allows us to directly address the whole register
1195 * context without requiring a separate indexing register holding the
1196 * offset. This saves an instruction loading the offset for each guest
1197 * CPU context access, at the cost of a non-volatile register.
1198 * Fortunately, ARM64 has quite a lot more registers.
1199 */
1200typedef
1201#ifdef RT_ARCH_AMD64
1202int FNIEMTBNATIVE(PVMCPUCC pVCpu)
1203#else
1204int FNIEMTBNATIVE(PVMCPUCC pVCpu, PCPUMCTX pCtx)
1205#endif
1206#if RT_CPLUSPLUS_PREREQ(201700)
1207 IEM_NOEXCEPT_MAY_LONGJMP
1208#endif
1209 ;
1210/** Pointer to a native IEM TB entry point function.
1211 * This will throw/longjmp on occation. */
1212typedef FNIEMTBNATIVE *PFNIEMTBNATIVE;
1213
1214
1215/**
1216 * Translation block debug info entry type.
1217 */
1218typedef enum IEMTBDBGENTRYTYPE
1219{
1220 kIemTbDbgEntryType_Invalid = 0,
1221 /** The entry is for marking a native code position.
1222 * Entries following this all apply to this position. */
1223 kIemTbDbgEntryType_NativeOffset,
1224 /** The entry is for a new guest instruction. */
1225 kIemTbDbgEntryType_GuestInstruction,
1226 /** Marks the start of a threaded call. */
1227 kIemTbDbgEntryType_ThreadedCall,
1228 /** Marks the location of a label. */
1229 kIemTbDbgEntryType_Label,
1230 /** Info about a host register shadowing a guest register. */
1231 kIemTbDbgEntryType_GuestRegShadowing,
1232#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
1233 /** Info about a host SIMD register shadowing a guest SIMD register. */
1234 kIemTbDbgEntryType_GuestSimdRegShadowing,
1235#endif
1236#ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING
1237 /** Info about a delayed RIP update. */
1238 kIemTbDbgEntryType_DelayedPcUpdate,
1239#endif
1240#if defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK) || defined(IEMNATIVE_WITH_SIMD_REG_ALLOCATOR)
1241 /** Info about a shadowed guest register becoming dirty. */
1242 kIemTbDbgEntryType_GuestRegDirty,
1243 /** Info about register writeback/flush oepration. */
1244 kIemTbDbgEntryType_GuestRegWriteback,
1245#endif
1246 kIemTbDbgEntryType_End
1247} IEMTBDBGENTRYTYPE;
1248
1249/**
1250 * Translation block debug info entry.
1251 */
1252typedef union IEMTBDBGENTRY
1253{
1254 /** Plain 32-bit view. */
1255 uint32_t u;
1256
1257 /** Generic view for getting at the type field. */
1258 struct
1259 {
1260 /** IEMTBDBGENTRYTYPE */
1261 uint32_t uType : 4;
1262 uint32_t uTypeSpecific : 28;
1263 } Gen;
1264
1265 struct
1266 {
1267        /* kIemTbDbgEntryType_NativeOffset. */
1268 uint32_t uType : 4;
1269 /** Native code offset. */
1270 uint32_t offNative : 28;
1271 } NativeOffset;
1272
1273 struct
1274 {
1275 /** kIemTbDbgEntryType_GuestInstruction. */
1276 uint32_t uType : 4;
1277 uint32_t uUnused : 4;
1278 /** The IEM_F_XXX flags. */
1279 uint32_t fExec : 24;
1280 } GuestInstruction;
1281
1282 struct
1283 {
1284 /* kIemTbDbgEntryType_ThreadedCall. */
1285 uint32_t uType : 4;
1286 /** Set if the call was recompiled to native code, clear if just calling
1287 * threaded function. */
1288 uint32_t fRecompiled : 1;
1289 uint32_t uUnused : 11;
1290 /** The threaded call number (IEMTHREADEDFUNCS). */
1291 uint32_t enmCall : 16;
1292 } ThreadedCall;
1293
1294 struct
1295 {
1296 /* kIemTbDbgEntryType_Label. */
1297 uint32_t uType : 4;
1298 uint32_t uUnused : 4;
1299 /** The label type (IEMNATIVELABELTYPE). */
1300 uint32_t enmLabel : 8;
1301 /** The label data. */
1302 uint32_t uData : 16;
1303 } Label;
1304
1305 struct
1306 {
1307 /* kIemTbDbgEntryType_GuestRegShadowing. */
1308 uint32_t uType : 4;
1309 uint32_t uUnused : 4;
1310 /** The guest register being shadowed (IEMNATIVEGSTREG). */
1311 uint32_t idxGstReg : 8;
1312        /** The new host register number, UINT8_MAX if dropped. */
1313 uint32_t idxHstReg : 8;
1314 /** The previous host register number, UINT8_MAX if new. */
1315 uint32_t idxHstRegPrev : 8;
1316 } GuestRegShadowing;
1317
1318#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
1319 struct
1320 {
1321 /* kIemTbDbgEntryType_GuestSimdRegShadowing. */
1322 uint32_t uType : 4;
1323 uint32_t uUnused : 4;
1324 /** The guest register being shadowed (IEMNATIVEGSTSIMDREG). */
1325 uint32_t idxGstSimdReg : 8;
1326        /** The new host register number, UINT8_MAX if dropped. */
1327 uint32_t idxHstSimdReg : 8;
1328 /** The previous host register number, UINT8_MAX if new. */
1329 uint32_t idxHstSimdRegPrev : 8;
1330 } GuestSimdRegShadowing;
1331#endif
1332
1333#ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING
1334 struct
1335 {
1336 /* kIemTbDbgEntryType_DelayedPcUpdate. */
1337 uint32_t uType : 4;
1338        /** The instruction offset added to the program counter. */
1339 uint32_t offPc : 14;
1340 /** Number of instructions skipped. */
1341 uint32_t cInstrSkipped : 14;
1342 } DelayedPcUpdate;
1343#endif
1344
1345#if defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK) || defined(IEMNATIVE_WITH_SIMD_REG_ALLOCATOR)
1346 struct
1347 {
1348 /* kIemTbDbgEntryType_GuestRegDirty. */
1349 uint32_t uType : 4;
1350 uint32_t uUnused : 11;
1351 /** Flag whether this is about a SIMD (true) or general (false) register. */
1352 uint32_t fSimdReg : 1;
1353 /** The guest register index being marked as dirty. */
1354 uint32_t idxGstReg : 8;
1355        /** The host register number this register is shadowed in. */
1356 uint32_t idxHstReg : 8;
1357 } GuestRegDirty;
1358
1359 struct
1360 {
1361 /* kIemTbDbgEntryType_GuestRegWriteback. */
1362 uint32_t uType : 4;
1363 /** Flag whether this is about a SIMD (true) or general (false) register flush. */
1364 uint32_t fSimdReg : 1;
1365 /** The mask shift. */
1366 uint32_t cShift : 2;
1367 /** The guest register mask being written back. */
1368 uint32_t fGstReg : 25;
1369 } GuestRegWriteback;
1370#endif
1371
1372} IEMTBDBGENTRY;
1373AssertCompileSize(IEMTBDBGENTRY, sizeof(uint32_t));
1374/** Pointer to a debug info entry. */
1375typedef IEMTBDBGENTRY *PIEMTBDBGENTRY;
1376/** Pointer to a const debug info entry. */
1377typedef IEMTBDBGENTRY const *PCIEMTBDBGENTRY;
1378
1379/**
1380 * Translation block debug info.
1381 */
1382typedef struct IEMTBDBG
1383{
1384 /** Number of entries in aEntries. */
1385 uint32_t cEntries;
1386 /** The offset of the last kIemTbDbgEntryType_NativeOffset record. */
1387 uint32_t offNativeLast;
1388 /** Debug info entries. */
1389 RT_FLEXIBLE_ARRAY_EXTENSION
1390 IEMTBDBGENTRY aEntries[RT_FLEXIBLE_ARRAY];
1391} IEMTBDBG;
1392/** Pointer to TB debug info. */
1393typedef IEMTBDBG *PIEMTBDBG;
1394/** Pointer to const TB debug info. */
1395typedef IEMTBDBG const *PCIEMTBDBG;
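
/* Minimal decoding sketch (illustrative only, not an IEM API; pDbgInfo is an
 * assumed valid PIEMTBDBG): walk the entries and dispatch on the common type
 * field shared by all the views above.
 * @code
 *  for (uint32_t i = 0; i < pDbgInfo->cEntries; i++)
 *  {
 *      IEMTBDBGENTRY const Entry = pDbgInfo->aEntries[i];
 *      switch ((IEMTBDBGENTRYTYPE)Entry.Gen.uType)
 *      {
 *          case kIemTbDbgEntryType_NativeOffset:
 *              RTPrintf("@ native offset %#x\n", Entry.NativeOffset.offNative);
 *              break;
 *          case kIemTbDbgEntryType_Label:
 *              RTPrintf("label %u (data %#x)\n", Entry.Label.enmLabel, Entry.Label.uData);
 *              break;
 *          default:
 *              break;
 *      }
 *  }
 * @endcode */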
1396
1397
1398/**
1399 * Translation block.
1400 *
1401 * The current plan is to just keep TBs and associated lookup hash table private
1402 * to each VCpu as that simplifies TB removal greatly (no races) and generally
1403 * avoids using expensive atomic primitives for updating lists and stuff.
1404 */
1405#pragma pack(2) /* to prevent the Thrd structure from being padded unnecessarily */
1406typedef struct IEMTB
1407{
1408 /** Next block with the same hash table entry. */
1409 struct IEMTB *pNext;
1410 /** Usage counter. */
1411 uint32_t cUsed;
1412 /** The IEMCPU::msRecompilerPollNow last time it was used. */
1413 uint32_t msLastUsed;
1414
1415 /** @name What uniquely identifies the block.
1416 * @{ */
1417 RTGCPHYS GCPhysPc;
1418 /** IEMTB_F_XXX (i.e. IEM_F_XXX ++). */
1419 uint32_t fFlags;
1420 union
1421 {
1422 struct
1423 {
1424            /** Relevant CS X86DESCATTR_XXX bits. */
1425 uint16_t fAttr;
1426 } x86;
1427 };
1428 /** @} */
1429
1430 /** Number of opcode ranges. */
1431 uint8_t cRanges;
1432 /** Statistics: Number of instructions in the block. */
1433 uint8_t cInstructions;
1434
1435 /** Type specific info. */
1436 union
1437 {
1438 struct
1439 {
1440 /** The call sequence table. */
1441 PIEMTHRDEDCALLENTRY paCalls;
1442 /** Number of calls in paCalls. */
1443 uint16_t cCalls;
1444 /** Number of calls allocated. */
1445 uint16_t cAllocated;
1446 } Thrd;
1447 struct
1448 {
1449 /** The native instructions (PFNIEMTBNATIVE). */
1450 PIEMNATIVEINSTR paInstructions;
1451 /** Number of instructions pointed to by paInstructions. */
1452 uint32_t cInstructions;
1453 } Native;
1454 /** Generic view for zeroing when freeing. */
1455 struct
1456 {
1457 uintptr_t uPtr;
1458 uint32_t uData;
1459 } Gen;
1460 };
1461
1462 /** The allocation chunk this TB belongs to. */
1463 uint8_t idxAllocChunk;
1464 /** The number of entries in the lookup table.
1465 * Because we're out of space, the TB lookup table is located before the
1466 * opcodes pointed to by pabOpcodes. */
1467 uint8_t cTbLookupEntries;
1468
1469 /** Number of bytes of opcodes stored in pabOpcodes.
1470 * @todo this field isn't really needed, aRanges keeps the actual info. */
1471 uint16_t cbOpcodes;
1472 /** Pointer to the opcode bytes this block was recompiled from.
1473 * This also points to the TB lookup table, which starts cTbLookupEntries
1474     * entries before the opcodes (we don't have room atm for another pointer). */
1475 uint8_t *pabOpcodes;
1476
1477 /** Debug info if enabled.
1478 * This is only generated by the native recompiler. */
1479 PIEMTBDBG pDbgInfo;
1480
1481 /* --- 64 byte cache line end --- */
1482
1483 /** Opcode ranges.
1484 *
1485 * The opcode checkers and maybe TLB loading functions will use this to figure
1486 * out what to do. The parameter will specify an entry and the opcode offset to
1487 * start at and the minimum number of bytes to verify (instruction length).
1488 *
1489     * When VT-x and AMD-V look up the opcode bytes for an exiting instruction,
1490 * they'll first translate RIP (+ cbInstr - 1) to a physical address using the
1491 * code TLB (must have a valid entry for that address) and scan the ranges to
1492 * locate the corresponding opcodes. Probably.
1493 */
1494 struct IEMTBOPCODERANGE
1495 {
1496 /** Offset within pabOpcodes. */
1497 uint16_t offOpcodes;
1498 /** Number of bytes. */
1499 uint16_t cbOpcodes;
1500 /** The page offset. */
1501 RT_GCC_EXTENSION
1502 uint16_t offPhysPage : 12;
1503 /** Unused bits. */
1504 RT_GCC_EXTENSION
1505 uint16_t u2Unused : 2;
1506 /** Index into GCPhysPc + aGCPhysPages for the physical page address. */
1507 RT_GCC_EXTENSION
1508 uint16_t idxPhysPage : 2;
1509 } aRanges[8];
1510
1511 /** Physical pages that this TB covers.
1512     * GCPhysPc w/o the page offset is element zero, so idxPhysPage values 1 and 2 map to entries 0 and 1 here. */
1513 RTGCPHYS aGCPhysPages[2];
1514} IEMTB;
1515#pragma pack()
1516AssertCompileMemberAlignment(IEMTB, GCPhysPc, sizeof(RTGCPHYS));
1517AssertCompileMemberAlignment(IEMTB, Thrd, sizeof(void *));
1518AssertCompileMemberAlignment(IEMTB, pabOpcodes, sizeof(void *));
1519AssertCompileMemberAlignment(IEMTB, pDbgInfo, sizeof(void *));
1520AssertCompileMemberAlignment(IEMTB, aGCPhysPages, sizeof(RTGCPHYS));
1521AssertCompileMemberOffset(IEMTB, aRanges, 64);
1522AssertCompileMemberSize(IEMTB, aRanges[0], 6);
1523#if 1
1524AssertCompileSize(IEMTB, 128);
1525# define IEMTB_SIZE_IS_POWER_OF_TWO /**< The IEMTB size is a power of two. */
1526#else
1527AssertCompileSize(IEMTB, 168);
1528# undef IEMTB_SIZE_IS_POWER_OF_TWO
1529#endif
1530
1531/** Pointer to a translation block. */
1532typedef IEMTB *PIEMTB;
1533/** Pointer to a const translation block. */
1534typedef IEMTB const *PCIEMTB;
1535
1536/** Gets address of the given TB lookup table entry. */
1537#define IEMTB_GET_TB_LOOKUP_TAB_ENTRY(a_pTb, a_idx) \
1538 ((PIEMTB *)&(a_pTb)->pabOpcodes[-(int)((a_pTb)->cTbLookupEntries - (a_idx)) * sizeof(PIEMTB)])
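
/* Illustrative use (assumes a valid pTb with cTbLookupEntries >= 1): fetch
 * the TB pointer stored in lookup slot zero.
 * @code
 *  PIEMTB const pNextTb = *IEMTB_GET_TB_LOOKUP_TAB_ENTRY(pTb, 0);
 * @endcode */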
1539
1540/**
1541 * Gets the physical address for a TB opcode range.
1542 */
1543DECL_FORCE_INLINE(RTGCPHYS) iemTbGetRangePhysPageAddr(PCIEMTB pTb, uint8_t idxRange)
1544{
1545 Assert(idxRange < RT_MIN(pTb->cRanges, RT_ELEMENTS(pTb->aRanges)));
1546 uint8_t const idxPage = pTb->aRanges[idxRange].idxPhysPage;
1547 Assert(idxPage <= RT_ELEMENTS(pTb->aGCPhysPages));
1548 if (idxPage == 0)
1549 return pTb->GCPhysPc & ~(RTGCPHYS)GUEST_PAGE_OFFSET_MASK;
1550 Assert(!(pTb->aGCPhysPages[idxPage - 1] & GUEST_PAGE_OFFSET_MASK));
1551 return pTb->aGCPhysPages[idxPage - 1];
1552}
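
/* Usage sketch (pTb and idxRange are assumed inputs): OR the in-page offset
 * of the range onto the page base returned above to get the physical address
 * of its first opcode byte.
 * @code
 *  RTGCPHYS const GCPhysOpcodes = iemTbGetRangePhysPageAddr(pTb, idxRange)
 *                               | pTb->aRanges[idxRange].offPhysPage;
 * @endcode */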
1553
1554
1555/**
1556 * A chunk of memory in the TB allocator.
1557 */
1558typedef struct IEMTBCHUNK
1559{
1560 /** Pointer to the translation blocks in this chunk. */
1561 PIEMTB paTbs;
1562#ifdef IN_RING0
1563 /** Allocation handle. */
1564 RTR0MEMOBJ hMemObj;
1565#endif
1566} IEMTBCHUNK;
1567
1568/**
1569 * A per-CPU translation block allocator.
1570 *
1571 * Because of how the IEMTBCACHE uses the lower 6 bits of the TB address to keep
1572 * the length of the collision list, and of course also for cache line alignment
1573 * reasons, the TBs must be allocated with at least 64-byte alignment.
1574 * Memory is therefore allocated using one of the page-aligned allocators.
1575 *
1576 *
1577 * To avoid wasting too much memory, it is allocated piecemeal as needed,
1578 * in chunks (IEMTBCHUNK) of 2 MiB or more. The TB has an 8-bit chunk index
1579 * that enables us to quickly calculate the allocation bitmap position when
1580 * freeing the translation block.
1581 */
1582typedef struct IEMTBALLOCATOR
1583{
1584 /** Magic value (IEMTBALLOCATOR_MAGIC). */
1585 uint32_t uMagic;
1586
1587#ifdef IEMTB_SIZE_IS_POWER_OF_TWO
1588 /** Mask corresponding to cTbsPerChunk - 1. */
1589 uint32_t fChunkMask;
1590 /** Shift count corresponding to cTbsPerChunk. */
1591 uint8_t cChunkShift;
1592#else
1593 uint32_t uUnused;
1594 uint8_t bUnused;
1595#endif
1596 /** Number of chunks we're allowed to allocate. */
1597 uint8_t cMaxChunks;
1598 /** Number of chunks currently populated. */
1599 uint16_t cAllocatedChunks;
1600 /** Number of translation blocks per chunk. */
1601 uint32_t cTbsPerChunk;
1602 /** Chunk size. */
1603 uint32_t cbPerChunk;
1604
1605 /** The maximum number of TBs. */
1606 uint32_t cMaxTbs;
1607 /** Total number of TBs in the populated chunks.
1608 * (cAllocatedChunks * cTbsPerChunk) */
1609 uint32_t cTotalTbs;
1610 /** The current number of TBs in use.
1611     * The number of free TBs: cTotalTbs - cInUseTbs. */
1612 uint32_t cInUseTbs;
1613 /** Statistics: Number of the cInUseTbs that are native ones. */
1614 uint32_t cNativeTbs;
1615 /** Statistics: Number of the cInUseTbs that are threaded ones. */
1616 uint32_t cThreadedTbs;
1617
1618 /** Where to start pruning TBs from when we're out.
1619 * See iemTbAllocatorAllocSlow for details. */
1620 uint32_t iPruneFrom;
1621 /** Where to start pruning native TBs from when we're out of executable memory.
1622 * See iemTbAllocatorFreeupNativeSpace for details. */
1623 uint32_t iPruneNativeFrom;
1624 uint64_t u64Padding;
1625
1626 /** Statistics: Number of TB allocation calls. */
1627 STAMCOUNTER StatAllocs;
1628 /** Statistics: Number of TB free calls. */
1629 STAMCOUNTER StatFrees;
1630    /** Statistics: Time spent pruning. */
1631 STAMPROFILE StatPrune;
1632    /** Statistics: Time spent pruning native TBs. */
1633 STAMPROFILE StatPruneNative;
1634
1635 /** The delayed free list (see iemTbAlloctorScheduleForFree). */
1636 PIEMTB pDelayedFreeHead;
1637    /** Head of the list of free TBs. */
1638 PIEMTB pTbsFreeHead;
1639
1640 /** Allocation chunks. */
1641 IEMTBCHUNK aChunks[256];
1642} IEMTBALLOCATOR;
1643/** Pointer to a TB allocator. */
1644typedef struct IEMTBALLOCATOR *PIEMTBALLOCATOR;
1645
1646/** Magic value for the TB allocator (Emmet Harley Cohen). */
1647#define IEMTBALLOCATOR_MAGIC UINT32_C(0x19900525)
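
/* Sketch (assumes IEMTB_SIZE_IS_POWER_OF_TWO and a flat TB index idxTb, per
 * the comments above): how the shift/mask pair maps a TB index to its chunk
 * and intra-chunk slot, e.g. when freeing.
 * @code
 *  uint32_t const idxChunk   = idxTb >> pTbAllocator->cChunkShift;
 *  uint32_t const idxInChunk = idxTb & pTbAllocator->fChunkMask;
 *  PIEMTB   const pTb        = &pTbAllocator->aChunks[idxChunk].paTbs[idxInChunk];
 * @endcode */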
1648
1649
1650/**
1651 * A per-CPU translation block cache (hash table).
1652 *
1653 * The hash table is allocated once during IEM initialization, sized to double
1654 * the max TB count and rounded up to the nearest power of two (so we can use an
1655 * AND mask rather than a modulo when hashing).
1656 */
1657typedef struct IEMTBCACHE
1658{
1659 /** Magic value (IEMTBCACHE_MAGIC). */
1660 uint32_t uMagic;
1661 /** Size of the hash table. This is a power of two. */
1662 uint32_t cHash;
1663 /** The mask corresponding to cHash. */
1664 uint32_t uHashMask;
1665 uint32_t uPadding;
1666
1667 /** @name Statistics
1668 * @{ */
1669 /** Number of collisions ever. */
1670 STAMCOUNTER cCollisions;
1671
1672 /** Statistics: Number of TB lookup misses. */
1673 STAMCOUNTER cLookupMisses;
1674 /** Statistics: Number of TB lookup hits via hash table (debug only). */
1675 STAMCOUNTER cLookupHits;
1676 /** Statistics: Number of TB lookup hits via TB associated lookup table (debug only). */
1677 STAMCOUNTER cLookupHitsViaTbLookupTable;
1678 STAMCOUNTER auPadding2[2];
1679 /** Statistics: Collision list length pruning. */
1680 STAMPROFILE StatPrune;
1681 /** @} */
1682
1683 /** The hash table itself.
1684 * @note The lower 6 bits of the pointer is used for keeping the collision
1685 * list length, so we can take action when it grows too long.
1686 * This works because TBs are allocated using a 64 byte (or
1687 * higher) alignment from page aligned chunks of memory, so the lower
1688 * 6 bits of the address will always be zero.
1689 * See IEMTBCACHE_PTR_COUNT_MASK, IEMTBCACHE_PTR_MAKE and friends.
1690 */
1691 RT_FLEXIBLE_ARRAY_EXTENSION
1692 PIEMTB apHash[RT_FLEXIBLE_ARRAY];
1693} IEMTBCACHE;
1694/** Pointer to a per-CPU translation block cache. */
1695typedef IEMTBCACHE *PIEMTBCACHE;
1696
1697/** Magic value for IEMTBCACHE (Johnny O'Neal). */
1698#define IEMTBCACHE_MAGIC UINT32_C(0x19561010)
1699
1700/** The collision count mask for IEMTBCACHE::apHash entries. */
1701#define IEMTBCACHE_PTR_COUNT_MASK ((uintptr_t)0x3f)
1702/** The max collision count for IEMTBCACHE::apHash entries before pruning. */
1703#define IEMTBCACHE_PTR_MAX_COUNT ((uintptr_t)0x30)
1704/** Combine a TB pointer and a collision list length into a value for an
1705 * IEMTBCACHE::apHash entry. */
1706#define IEMTBCACHE_PTR_MAKE(a_pTb, a_cCount) (PIEMTB)((uintptr_t)(a_pTb) | (a_cCount))
1707/** Get the TB pointer from an IEMTBCACHE::apHash entry. */
1709#define IEMTBCACHE_PTR_GET_TB(a_pHashEntry) (PIEMTB)((uintptr_t)(a_pHashEntry) & ~IEMTBCACHE_PTR_COUNT_MASK)
1710/** Get the collision list length from an IEMTBCACHE::apHash entry. */
1712#define IEMTBCACHE_PTR_GET_COUNT(a_pHashEntry) ((uintptr_t)(a_pHashEntry) & IEMTBCACHE_PTR_COUNT_MASK)
1713
1714/**
1715 * Calculates the hash table slot for a TB from physical PC address and TB flags.
1716 */
1717#define IEMTBCACHE_HASH(a_paCache, a_fTbFlags, a_GCPhysPc) \
1718 IEMTBCACHE_HASH_NO_KEY_MASK(a_paCache, (a_fTbFlags) & IEMTB_F_KEY_MASK, a_GCPhysPc)
1719
1720/**
1721 * Calculates the hash table slot for a TB from physical PC address and TB
1722 * flags, ASSUMING the caller has applied IEMTB_F_KEY_MASK to @a a_fTbFlags.
1723 */
1724#define IEMTBCACHE_HASH_NO_KEY_MASK(a_paCache, a_fTbFlags, a_GCPhysPc) \
1725 (((uint32_t)(a_GCPhysPc) ^ (a_fTbFlags)) & (a_paCache)->uHashMask)
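
/* Lookup sketch (pTbCache, fFlags and GCPhysPc are assumed inputs): hash into
 * the table and separate the TB pointer from the embedded collision count.
 * @code
 *  uint32_t const  idxHash     = IEMTBCACHE_HASH(pTbCache, fFlags, GCPhysPc);
 *  PIEMTB const    pTb         = IEMTBCACHE_PTR_GET_TB(pTbCache->apHash[idxHash]);
 *  uintptr_t const cCollisions = IEMTBCACHE_PTR_GET_COUNT(pTbCache->apHash[idxHash]);
 * @endcode */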
1726
1727
1728/** @name IEMBRANCHED_F_XXX - Branched indicator (IEMCPU::fTbBranched).
1729 *
1730 * These flags parallel the main IEM_CIMPL_F_BRANCH_XXX flags.
1731 *
1732 * @{ */
1733/** Value if no branching happened recently. */
1734#define IEMBRANCHED_F_NO UINT8_C(0x00)
1735/** Flag set if direct branch, clear if absolute or indirect. */
1736#define IEMBRANCHED_F_DIRECT UINT8_C(0x01)
1737/** Flag set if indirect branch, clear if direct or relative. */
1738#define IEMBRANCHED_F_INDIRECT UINT8_C(0x02)
1739/** Flag set if relative branch, clear if absolute or indirect. */
1740#define IEMBRANCHED_F_RELATIVE UINT8_C(0x04)
1741/** Flag set if conditional branch, clear if unconditional. */
1742#define IEMBRANCHED_F_CONDITIONAL UINT8_C(0x08)
1743/** Flag set if it's a far branch. */
1744#define IEMBRANCHED_F_FAR UINT8_C(0x10)
1745/** Flag set if the stack pointer is modified. */
1746#define IEMBRANCHED_F_STACK UINT8_C(0x20)
1747/** Flag set if the stack pointer and (maybe) the stack segment are modified. */
1748#define IEMBRANCHED_F_STACK_FAR UINT8_C(0x40)
1749/** Flag set (by IEM_MC_REL_JMP_XXX) if it's a zero-byte relative jump. */
1750#define IEMBRANCHED_F_ZERO UINT8_C(0x80)
1751/** @} */
1752
1753
1754/**
1755 * The per-CPU IEM state.
1756 */
1757typedef struct IEMCPU
1758{
1759 /** Info status code that needs to be propagated to the IEM caller.
1760 * This cannot be passed internally, as it would complicate all success
1761 * checks within the interpreter, making the code larger and almost impossible
1762 * to get right. Instead, we'll store status codes to pass on here. Each
1763 * source of these codes will perform appropriate sanity checks. */
1764 int32_t rcPassUp; /* 0x00 */
1765 /** Execution flag, IEM_F_XXX. */
1766 uint32_t fExec; /* 0x04 */
1767
1768 /** @name Decoder state.
1769 * @{ */
1770#ifdef IEM_WITH_CODE_TLB
1771 /** The offset of the next instruction byte. */
1772 uint32_t offInstrNextByte; /* 0x08 */
1773 /** The number of bytes available at pbInstrBuf for the current instruction.
1774 * This takes the max opcode length into account so that doesn't need to be
1775 * checked separately. */
1776 uint32_t cbInstrBuf; /* 0x0c */
1777 /** Pointer to the page containing RIP, user specified buffer or abOpcode.
1778 * This can be NULL if the page isn't mappable for some reason, in which
1779 * case we'll do fallback stuff.
1780 *
1781 * If we're executing an instruction from a user specified buffer,
1782 * IEMExecOneWithPrefetchedByPC and friends, this is not necessarily a page
1783     * aligned pointer but a pointer to the user data.
1784 *
1785 * For instructions crossing pages, this will start on the first page and be
1786 * advanced to the next page by the time we've decoded the instruction. This
1787 * therefore precludes stuff like <tt>pbInstrBuf[offInstrNextByte + cbInstrBuf - cbCurInstr]</tt>
1788 */
1789 uint8_t const *pbInstrBuf; /* 0x10 */
1790# if ARCH_BITS == 32
1791    uint32_t        uInstrBufHigh; /**< The high dword of the host context pbInstrBuf member. */
1792# endif
1793 /** The program counter corresponding to pbInstrBuf.
1794 * This is set to a non-canonical address when we need to invalidate it. */
1795 uint64_t uInstrBufPc; /* 0x18 */
1796 /** The guest physical address corresponding to pbInstrBuf. */
1797 RTGCPHYS GCPhysInstrBuf; /* 0x20 */
1798 /** The number of bytes available at pbInstrBuf in total (for IEMExecLots).
1799 * This takes the CS segment limit into account.
1800 * @note Set to zero when the code TLB is flushed to trigger TLB reload. */
1801 uint16_t cbInstrBufTotal; /* 0x28 */
1802 /** Offset into pbInstrBuf of the first byte of the current instruction.
1803 * Can be negative to efficiently handle cross page instructions. */
1804 int16_t offCurInstrStart; /* 0x2a */
1805
1806# ifndef IEM_WITH_OPAQUE_DECODER_STATE
1807 /** The prefix mask (IEM_OP_PRF_XXX). */
1808 uint32_t fPrefixes; /* 0x2c */
1809 /** The extra REX ModR/M register field bit (REX.R << 3). */
1810 uint8_t uRexReg; /* 0x30 */
1811 /** The extra REX ModR/M r/m field, SIB base and opcode reg bit
1812 * (REX.B << 3). */
1813 uint8_t uRexB; /* 0x31 */
1814 /** The extra REX SIB index field bit (REX.X << 3). */
1815 uint8_t uRexIndex; /* 0x32 */
1816
1817 /** The effective segment register (X86_SREG_XXX). */
1818 uint8_t iEffSeg; /* 0x33 */
1819
1820 /** The offset of the ModR/M byte relative to the start of the instruction. */
1821 uint8_t offModRm; /* 0x34 */
1822
1823# ifdef IEM_WITH_CODE_TLB_AND_OPCODE_BUF
1824 /** The current offset into abOpcode. */
1825 uint8_t offOpcode; /* 0x35 */
1826# else
1827 uint8_t bUnused; /* 0x35 */
1828# endif
1829# else /* IEM_WITH_OPAQUE_DECODER_STATE */
1830 uint8_t abOpaqueDecoderPart1[0x36 - 0x2c];
1831# endif /* IEM_WITH_OPAQUE_DECODER_STATE */
1832
1833#else /* !IEM_WITH_CODE_TLB */
1834# ifndef IEM_WITH_OPAQUE_DECODER_STATE
1835 /** The size of what has currently been fetched into abOpcode. */
1836 uint8_t cbOpcode; /* 0x08 */
1837 /** The current offset into abOpcode. */
1838 uint8_t offOpcode; /* 0x09 */
1839 /** The offset of the ModR/M byte relative to the start of the instruction. */
1840 uint8_t offModRm; /* 0x0a */
1841
1842 /** The effective segment register (X86_SREG_XXX). */
1843 uint8_t iEffSeg; /* 0x0b */
1844
1845 /** The prefix mask (IEM_OP_PRF_XXX). */
1846 uint32_t fPrefixes; /* 0x0c */
1847 /** The extra REX ModR/M register field bit (REX.R << 3). */
1848 uint8_t uRexReg; /* 0x10 */
1849 /** The extra REX ModR/M r/m field, SIB base and opcode reg bit
1850 * (REX.B << 3). */
1851 uint8_t uRexB; /* 0x11 */
1852 /** The extra REX SIB index field bit (REX.X << 3). */
1853 uint8_t uRexIndex; /* 0x12 */
1854
1855# else /* IEM_WITH_OPAQUE_DECODER_STATE */
1856 uint8_t abOpaqueDecoderPart1[0x13 - 0x08];
1857# endif /* IEM_WITH_OPAQUE_DECODER_STATE */
1858#endif /* !IEM_WITH_CODE_TLB */
1859
1860#ifndef IEM_WITH_OPAQUE_DECODER_STATE
1861 /** The effective operand mode. */
1862 IEMMODE enmEffOpSize; /* 0x36, 0x13 */
1863 /** The default addressing mode. */
1864 IEMMODE enmDefAddrMode; /* 0x37, 0x14 */
1865 /** The effective addressing mode. */
1866 IEMMODE enmEffAddrMode; /* 0x38, 0x15 */
1867 /** The default operand mode. */
1868 IEMMODE enmDefOpSize; /* 0x39, 0x16 */
1869
1870 /** Prefix index (VEX.pp) for two byte and three byte tables. */
1871 uint8_t idxPrefix; /* 0x3a, 0x17 */
1872 /** 3rd VEX/EVEX/XOP register.
1873 * Please use IEM_GET_EFFECTIVE_VVVV to access. */
1874 uint8_t uVex3rdReg; /* 0x3b, 0x18 */
1875 /** The VEX/EVEX/XOP length field. */
1876 uint8_t uVexLength; /* 0x3c, 0x19 */
1877 /** Additional EVEX stuff. */
1878 uint8_t fEvexStuff; /* 0x3d, 0x1a */
1879
1880# ifndef IEM_WITH_CODE_TLB
1881 /** Explicit alignment padding. */
1882 uint8_t abAlignment2a[1]; /* 0x1b */
1883# endif
1884 /** The FPU opcode (FOP). */
1885 uint16_t uFpuOpcode; /* 0x3e, 0x1c */
1886# ifndef IEM_WITH_CODE_TLB
1887 /** Explicit alignment padding. */
1888 uint8_t abAlignment2b[2]; /* 0x1e */
1889# endif
1890
1891 /** The opcode bytes. */
1892 uint8_t abOpcode[15]; /* 0x40, 0x20 */
1893 /** Explicit alignment padding. */
1894# ifdef IEM_WITH_CODE_TLB
1895 //uint8_t abAlignment2c[0x4f - 0x4f]; /* 0x4f */
1896# else
1897 uint8_t abAlignment2c[0x4f - 0x2f]; /* 0x2f */
1898# endif
1899
1900#else /* IEM_WITH_OPAQUE_DECODER_STATE */
1901# ifdef IEM_WITH_CODE_TLB
1902 uint8_t abOpaqueDecoderPart2[0x4f - 0x36];
1903# else
1904 uint8_t abOpaqueDecoderPart2[0x4f - 0x13];
1905# endif
1906#endif /* IEM_WITH_OPAQUE_DECODER_STATE */
1907 /** @} */
1908
1909
1910 /** The number of active guest memory mappings. */
1911 uint8_t cActiveMappings; /* 0x4f, 0x4f */
1912
1913 /** Records for tracking guest memory mappings. */
1914 struct
1915 {
1916 /** The address of the mapped bytes. */
1917 R3R0PTRTYPE(void *) pv;
1918 /** The access flags (IEM_ACCESS_XXX).
1919 * IEM_ACCESS_INVALID if the entry is unused. */
1920 uint32_t fAccess;
1921#if HC_ARCH_BITS == 64
1922 uint32_t u32Alignment4; /**< Alignment padding. */
1923#endif
1924 } aMemMappings[3]; /* 0x50 LB 0x30 */
1925
1926 /** Locking records for the mapped memory. */
1927 union
1928 {
1929 PGMPAGEMAPLOCK Lock;
1930 uint64_t au64Padding[2];
1931 } aMemMappingLocks[3]; /* 0x80 LB 0x30 */
1932
1933 /** Bounce buffer info.
1934 * This runs in parallel to aMemMappings. */
1935 struct
1936 {
1937 /** The physical address of the first byte. */
1938 RTGCPHYS GCPhysFirst;
1939 /** The physical address of the second page. */
1940 RTGCPHYS GCPhysSecond;
1941 /** The number of bytes in the first page. */
1942 uint16_t cbFirst;
1943 /** The number of bytes in the second page. */
1944 uint16_t cbSecond;
1945 /** Whether it's unassigned memory. */
1946 bool fUnassigned;
1947 /** Explicit alignment padding. */
1948 bool afAlignment5[3];
1949 } aMemBbMappings[3]; /* 0xb0 LB 0x48 */
1950
1951 /** The flags of the current exception / interrupt. */
1952 uint32_t fCurXcpt; /* 0xf8 */
1953 /** The current exception / interrupt. */
1954 uint8_t uCurXcpt; /* 0xfc */
1955 /** Exception / interrupt recursion depth. */
1956 int8_t cXcptRecursions; /* 0xfb */
1957
1958 /** The next unused mapping index.
1959 * @todo try find room for this up with cActiveMappings. */
1960 uint8_t iNextMapping; /* 0xfd */
1961 uint8_t abAlignment7[1];
1962
1963 /** Bounce buffer storage.
1964 * This runs in parallel to aMemMappings and aMemBbMappings. */
1965 struct
1966 {
1967 uint8_t ab[512];
1968 } aBounceBuffers[3]; /* 0x100 LB 0x600 */
1969
1970
1971 /** Pointer set jump buffer - ring-3 context. */
1972 R3PTRTYPE(jmp_buf *) pJmpBufR3;
1973 /** Pointer set jump buffer - ring-0 context. */
1974 R0PTRTYPE(jmp_buf *) pJmpBufR0;
1975
1976 /** @todo Should move this near @a fCurXcpt later. */
1977 /** The CR2 for the current exception / interrupt. */
1978 uint64_t uCurXcptCr2;
1979 /** The error code for the current exception / interrupt. */
1980 uint32_t uCurXcptErr;
1981
1982 /** @name Statistics
1983 * @{ */
1984 /** The number of instructions we've executed. */
1985 uint32_t cInstructions;
1986 /** The number of potential exits. */
1987 uint32_t cPotentialExits;
1988    /** The number of bytes of data or stack written (mostly for IEMExecOneEx).
1989 * This may contain uncommitted writes. */
1990 uint32_t cbWritten;
1991 /** Counts the VERR_IEM_INSTR_NOT_IMPLEMENTED returns. */
1992 uint32_t cRetInstrNotImplemented;
1993 /** Counts the VERR_IEM_ASPECT_NOT_IMPLEMENTED returns. */
1994 uint32_t cRetAspectNotImplemented;
1995 /** Counts informational statuses returned (other than VINF_SUCCESS). */
1996 uint32_t cRetInfStatuses;
1997 /** Counts other error statuses returned. */
1998 uint32_t cRetErrStatuses;
1999 /** Number of times rcPassUp has been used. */
2000 uint32_t cRetPassUpStatus;
2001 /** Number of times RZ left with instruction commit pending for ring-3. */
2002 uint32_t cPendingCommit;
2003 /** Number of misaligned (host sense) atomic instruction accesses. */
2004 uint32_t cMisalignedAtomics;
2005 /** Number of long jumps. */
2006 uint32_t cLongJumps;
2007 /** @} */
2008
2009 /** @name Target CPU information.
2010 * @{ */
2011#if IEM_CFG_TARGET_CPU == IEMTARGETCPU_DYNAMIC
2012 /** The target CPU. */
2013 uint8_t uTargetCpu;
2014#else
2015 uint8_t bTargetCpuPadding;
2016#endif
2017    /** For selecting assembly workers matching the target CPU EFLAGS behaviour, see
2018 * IEMTARGETCPU_EFL_BEHAVIOR_XXX for values, with the 1st entry for when no
2019 * native host support and the 2nd for when there is.
2020 *
2021 * The two values are typically indexed by a g_CpumHostFeatures bit.
2022 *
2023 * This is for instance used for the BSF & BSR instructions where AMD and
2024 * Intel CPUs produce different EFLAGS. */
2025 uint8_t aidxTargetCpuEflFlavour[2];
2026
2027 /** The CPU vendor. */
2028 CPUMCPUVENDOR enmCpuVendor;
2029 /** @} */
2030
2031 /** @name Host CPU information.
2032 * @{ */
2033 /** The CPU vendor. */
2034 CPUMCPUVENDOR enmHostCpuVendor;
2035 /** @} */
2036
2037 /** Counts RDMSR \#GP(0) LogRel(). */
2038 uint8_t cLogRelRdMsr;
2039 /** Counts WRMSR \#GP(0) LogRel(). */
2040 uint8_t cLogRelWrMsr;
2041 /** Alignment padding. */
2042 uint8_t abAlignment9[42];
2043
2044 /** @name Recompilation
2045 * @{ */
2046 /** Pointer to the current translation block.
2047 * This can either be one being executed or one being compiled. */
2048 R3PTRTYPE(PIEMTB) pCurTbR3;
2049#ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP
2050 /** Frame pointer for the last native TB to execute. */
2051 R3PTRTYPE(void *) pvTbFramePointerR3;
2052#else
2053 R3PTRTYPE(void *) pvUnusedR3;
2054#endif
2055#ifdef IEMNATIVE_WITH_SIMD_FP_NATIVE_EMITTERS
2056 /** The saved host floating point control register (MXCSR on x86, FPCR on arm64)
2057     * needing to be restored when the TB finishes. IEMNATIVE_SIMD_FP_CTRL_REG_NOT_MODIFIED indicates the TB
2058     * didn't modify it, so no restore is needed. */
2059# ifdef RT_ARCH_AMD64
2060 uint32_t uRegFpCtrl;
2061 /** Temporary copy of MXCSR for stmxcsr/ldmxcsr (so we don't have to fiddle with stack pointers). */
2062 uint32_t uRegMxcsrTmp;
2063# elif defined(RT_ARCH_ARM64)
2064 uint64_t uRegFpCtrl;
2065# else
2066# error "Port me"
2067# endif
2068#else
2069 uint64_t u64Unused;
2070#endif
2071 /** Fixed TB used for threaded recompilation.
2072 * This is allocated once with maxed-out sizes and re-used afterwards. */
2073 R3PTRTYPE(PIEMTB) pThrdCompileTbR3;
2074 /** Pointer to the ring-3 TB cache for this EMT. */
2075 R3PTRTYPE(PIEMTBCACHE) pTbCacheR3;
2076 /** Pointer to the ring-3 TB lookup entry.
2077     * This either points to pTbLookupEntryDummyR3 or an actual lookup table
2078 * entry, thus it can always safely be used w/o NULL checking. */
2079 R3PTRTYPE(PIEMTB *) ppTbLookupEntryR3;
2080 /** The PC (RIP) at the start of pCurTbR3/pCurTbR0.
2081     * The TBs are based on physical addresses, so this is needed to correlate
2082     * RIP with the opcode bytes stored in the TB (AMD-V / VT-x). */
2083 uint64_t uCurTbStartPc;
2084 /** Number of threaded TBs executed. */
2085 uint64_t cTbExecThreaded;
2086 /** Number of native TBs executed. */
2087 uint64_t cTbExecNative;
2088 /** Whether we need to check the opcode bytes for the current instruction.
2089 * This is set by a previous instruction if it modified memory or similar. */
2090 bool fTbCheckOpcodes;
2091 /** Indicates whether and how we just branched - IEMBRANCHED_F_XXX. */
2092 uint8_t fTbBranched;
2093 /** Set when GCPhysInstrBuf is updated because of a page crossing. */
2094 bool fTbCrossedPage;
2095 /** Whether to end the current TB. */
2096 bool fEndTb;
2097    /** Number of instructions before we need to emit an IRQ check call again.
2098     * This helps make sure we don't execute too long w/o checking for
2099     * interrupts, including immediately after instructions that may enable
2100 * interrupts (e.g. POPF, IRET, STI). With STI an additional hack is
2101     * required to make sure we check after the next instruction as well, see
2102 * fTbCurInstrIsSti. */
2103 uint8_t cInstrTillIrqCheck;
2104 /** Indicates that the current instruction is an STI. This is set by the
2105 * iemCImpl_sti code and subsequently cleared by the recompiler. */
2106 bool fTbCurInstrIsSti;
2107 /** The size of the IEMTB::pabOpcodes allocation in pThrdCompileTbR3. */
2108 uint16_t cbOpcodesAllocated;
2109 /** The current instruction number in a native TB.
2110 * This is set by code that may trigger an unexpected TB exit (throw/longjmp)
2111 * and will be picked up by the TB execution loop. Only used when
2112 * IEMNATIVE_WITH_INSTRUCTION_COUNTING is defined. */
2113 uint8_t idxTbCurInstr;
2114    /** Space reserved for recompiler data / alignment. */
2115 bool afRecompilerStuff1[3];
2116 /** The virtual sync time at the last timer poll call. */
2117 uint32_t msRecompilerPollNow;
2118 /** The IEMTB::cUsed value when to attempt native recompilation of a TB. */
2119 uint32_t uTbNativeRecompileAtUsedCount;
2120 /** The IEM_CIMPL_F_XXX mask for the current instruction. */
2121 uint32_t fTbCurInstr;
2122 /** The IEM_CIMPL_F_XXX mask for the previous instruction. */
2123 uint32_t fTbPrevInstr;
2124 /** Strict: Tracking skipped EFLAGS calculations. Any bits set here are
2125 * currently not up to date in EFLAGS. */
2126 uint32_t fSkippingEFlags;
2127 /** Previous GCPhysInstrBuf value - only valid if fTbCrossedPage is set. */
2128 RTGCPHYS GCPhysInstrBufPrev;
2129 /** Pointer to the ring-3 TB allocator for this EMT. */
2130 R3PTRTYPE(PIEMTBALLOCATOR) pTbAllocatorR3;
2131 /** Pointer to the ring-3 executable memory allocator for this EMT. */
2132 R3PTRTYPE(struct IEMEXECMEMALLOCATOR *) pExecMemAllocatorR3;
2133 /** Pointer to the native recompiler state for ring-3. */
2134 R3PTRTYPE(struct IEMRECOMPILERSTATE *) pNativeRecompilerStateR3;
2135 /** Dummy entry for ppTbLookupEntryR3. */
2136 R3PTRTYPE(PIEMTB) pTbLookupEntryDummyR3;
2137
2138 /** Dummy TLB entry used for accesses to pages with databreakpoints. */
2139 IEMTLBENTRY DataBreakpointTlbe;
2140
2141 /** Threaded TB statistics: Times TB execution was broken off before reaching the end. */
2142 STAMCOUNTER StatTbThreadedExecBreaks;
2143 /** Statistics: Times BltIn_CheckIrq breaks out of the TB. */
2144 STAMCOUNTER StatCheckIrqBreaks;
2145 /** Statistics: Times BltIn_CheckMode breaks out of the TB. */
2146 STAMCOUNTER StatCheckModeBreaks;
2147    /** Threaded TB statistics: Times execution broke off on a call with lookup entries. */
2148 STAMCOUNTER StatTbThreadedExecBreaksWithLookup;
2149    /** Threaded TB statistics: Times execution broke off on a call without lookup entries. */
2150 STAMCOUNTER StatTbThreadedExecBreaksWithoutLookup;
2151 /** Statistics: Times a post jump target check missed and had to find new TB. */
2152 STAMCOUNTER StatCheckBranchMisses;
2153 /** Statistics: Times a jump or page crossing required a TB with CS.LIM checking. */
2154 STAMCOUNTER StatCheckNeedCsLimChecking;
2155    /** Statistics: Times a loop was detected within a TB. */
2156 STAMCOUNTER StatTbLoopInTbDetected;
2157    /** Exec memory allocator statistics: Number of times allocating executable memory failed. */
2158 STAMCOUNTER StatNativeExecMemInstrBufAllocFailed;
2159 /** Native TB statistics: Number of fully recompiled TBs. */
2160 STAMCOUNTER StatNativeFullyRecompiledTbs;
2161 /** TB statistics: Number of instructions per TB. */
2162 STAMPROFILE StatTbInstr;
2163 /** TB statistics: Number of TB lookup table entries per TB. */
2164 STAMPROFILE StatTbLookupEntries;
2165 /** Threaded TB statistics: Number of calls per TB. */
2166 STAMPROFILE StatTbThreadedCalls;
2167 /** Native TB statistics: Native code size per TB. */
2168 STAMPROFILE StatTbNativeCode;
2169 /** Native TB statistics: Profiling native recompilation. */
2170 STAMPROFILE StatNativeRecompilation;
2171 /** Native TB statistics: Number of calls per TB that were recompiled properly. */
2172 STAMPROFILE StatNativeCallsRecompiled;
2173 /** Native TB statistics: Number of threaded calls per TB that weren't recompiled. */
2174 STAMPROFILE StatNativeCallsThreaded;
2175 /** Native recompiled execution: TLB hits for data fetches. */
2176 STAMCOUNTER StatNativeTlbHitsForFetch;
2177 /** Native recompiled execution: TLB hits for data stores. */
2178 STAMCOUNTER StatNativeTlbHitsForStore;
2179 /** Native recompiled execution: TLB hits for stack accesses. */
2180 STAMCOUNTER StatNativeTlbHitsForStack;
2181 /** Native recompiled execution: TLB hits for mapped accesses. */
2182 STAMCOUNTER StatNativeTlbHitsForMapped;
2183 /** Native recompiled execution: Code TLB misses for new page. */
2184 STAMCOUNTER StatNativeCodeTlbMissesNewPage;
2185 /** Native recompiled execution: Code TLB hits for new page. */
2186 STAMCOUNTER StatNativeCodeTlbHitsForNewPage;
2187 /** Native recompiled execution: Code TLB misses for new page with offset. */
2188 STAMCOUNTER StatNativeCodeTlbMissesNewPageWithOffset;
2189 /** Native recompiled execution: Code TLB hits for new page with offset. */
2190 STAMCOUNTER StatNativeCodeTlbHitsForNewPageWithOffset;
2191
2192 /** Native recompiler: Number of calls to iemNativeRegAllocFindFree. */
2193 STAMCOUNTER StatNativeRegFindFree;
2194 /** Native recompiler: Number of times iemNativeRegAllocFindFree needed
2195 * to free a variable. */
2196 STAMCOUNTER StatNativeRegFindFreeVar;
2197 /** Native recompiler: Number of times iemNativeRegAllocFindFree did
2198 * not need to free any variables. */
2199 STAMCOUNTER StatNativeRegFindFreeNoVar;
2200 /** Native recompiler: Liveness info freed shadowed guest registers in
2201 * iemNativeRegAllocFindFree. */
2202 STAMCOUNTER StatNativeRegFindFreeLivenessUnshadowed;
2203 /** Native recompiler: Liveness info helped with the allocation in
2204 * iemNativeRegAllocFindFree. */
2205 STAMCOUNTER StatNativeRegFindFreeLivenessHelped;
2206
2207 /** Native recompiler: Number of times status flags calc has been skipped. */
2208 STAMCOUNTER StatNativeEflSkippedArithmetic;
2209 /** Native recompiler: Number of times status flags calc has been skipped. */
2210 STAMCOUNTER StatNativeEflSkippedLogical;
2211
2212 /** Native recompiler: Number of opportunities to skip EFLAGS.CF updating. */
2213 STAMCOUNTER StatNativeLivenessEflCfSkippable;
2214 /** Native recompiler: Number of opportunities to skip EFLAGS.PF updating. */
2215 STAMCOUNTER StatNativeLivenessEflPfSkippable;
2216 /** Native recompiler: Number of opportunities to skip EFLAGS.AF updating. */
2217 STAMCOUNTER StatNativeLivenessEflAfSkippable;
2218 /** Native recompiler: Number of opportunities to skip EFLAGS.ZF updating. */
2219 STAMCOUNTER StatNativeLivenessEflZfSkippable;
2220 /** Native recompiler: Number of opportunities to skip EFLAGS.SF updating. */
2221 STAMCOUNTER StatNativeLivenessEflSfSkippable;
2222 /** Native recompiler: Number of opportunities to skip EFLAGS.OF updating. */
2223 STAMCOUNTER StatNativeLivenessEflOfSkippable;
2224 /** Native recompiler: Number of required EFLAGS.CF updates. */
2225 STAMCOUNTER StatNativeLivenessEflCfRequired;
2226 /** Native recompiler: Number of required EFLAGS.PF updates. */
2227 STAMCOUNTER StatNativeLivenessEflPfRequired;
2228 /** Native recompiler: Number of required EFLAGS.AF updates. */
2229 STAMCOUNTER StatNativeLivenessEflAfRequired;
2230 /** Native recompiler: Number of required EFLAGS.ZF updates. */
2231 STAMCOUNTER StatNativeLivenessEflZfRequired;
2232 /** Native recompiler: Number of required EFLAGS.SF updates. */
2233 STAMCOUNTER StatNativeLivenessEflSfRequired;
2234 /** Native recompiler: Number of required EFLAGS.OF updates. */
2235 STAMCOUNTER StatNativeLivenessEflOfRequired;
2236 /** Native recompiler: Number of potentially delayable EFLAGS.CF updates. */
2237 STAMCOUNTER StatNativeLivenessEflCfDelayable;
2238 /** Native recompiler: Number of potentially delayable EFLAGS.PF updates. */
2239 STAMCOUNTER StatNativeLivenessEflPfDelayable;
2240 /** Native recompiler: Number of potentially delayable EFLAGS.AF updates. */
2241 STAMCOUNTER StatNativeLivenessEflAfDelayable;
2242 /** Native recompiler: Number of potentially delayable EFLAGS.ZF updates. */
2243 STAMCOUNTER StatNativeLivenessEflZfDelayable;
2244 /** Native recompiler: Number of potentially delayable EFLAGS.SF updates. */
2245 STAMCOUNTER StatNativeLivenessEflSfDelayable;
2246 /** Native recompiler: Number of potentially delayable EFLAGS.OF updates. */
2247 STAMCOUNTER StatNativeLivenessEflOfDelayable;
2248
2249 /** Native recompiler: Number of potential PC updates in total. */
2250 STAMCOUNTER StatNativePcUpdateTotal;
2251 /** Native recompiler: Number of PC updates which could be delayed. */
2252 STAMCOUNTER StatNativePcUpdateDelayed;
2253
2254//#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
2255 /** Native recompiler: Number of calls to iemNativeSimdRegAllocFindFree. */
2256 STAMCOUNTER StatNativeSimdRegFindFree;
2257 /** Native recompiler: Number of times iemNativeSimdRegAllocFindFree needed
2258 * to free a variable. */
2259 STAMCOUNTER StatNativeSimdRegFindFreeVar;
2260 /** Native recompiler: Number of times iemNativeSimdRegAllocFindFree did
2261 * not need to free any variables. */
2262 STAMCOUNTER StatNativeSimdRegFindFreeNoVar;
2263 /** Native recompiler: Liveness info freed shadowed guest registers in
2264 * iemNativeSimdRegAllocFindFree. */
2265 STAMCOUNTER StatNativeSimdRegFindFreeLivenessUnshadowed;
2266 /** Native recompiler: Liveness info helped with the allocation in
2267 * iemNativeSimdRegAllocFindFree. */
2268 STAMCOUNTER StatNativeSimdRegFindFreeLivenessHelped;
2269
2270 /** Native recompiler: Number of potential IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE() checks. */
2271 STAMCOUNTER StatNativeMaybeDeviceNotAvailXcptCheckPotential;
2272 /** Native recompiler: Number of potential IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE() checks. */
2273 STAMCOUNTER StatNativeMaybeWaitDeviceNotAvailXcptCheckPotential;
2274 /** Native recompiler: Number of potential IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() checks. */
2275 STAMCOUNTER StatNativeMaybeSseXcptCheckPotential;
2276 /** Native recompiler: Number of potential IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() checks. */
2277 STAMCOUNTER StatNativeMaybeAvxXcptCheckPotential;
2278
2279 /** Native recompiler: Number of IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE() checks omitted. */
2280 STAMCOUNTER StatNativeMaybeDeviceNotAvailXcptCheckOmitted;
2281 /** Native recompiler: Number of IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE() checks omitted. */
2282 STAMCOUNTER StatNativeMaybeWaitDeviceNotAvailXcptCheckOmitted;
2283 /** Native recompiler: Number of IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT() checks omitted. */
2284 STAMCOUNTER StatNativeMaybeSseXcptCheckOmitted;
2285 /** Native recompiler: Number of IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() checks omitted. */
2286 STAMCOUNTER StatNativeMaybeAvxXcptCheckOmitted;
2287//#endif
2288
2289    /** Native recompiler: The TB finished executing completely without jumping to an exit label.
2290     * Not available in release builds. */
2291 STAMCOUNTER StatNativeTbFinished;
2292 /** Native recompiler: The TB finished executing jumping to the ReturnBreak label. */
2293 STAMCOUNTER StatNativeTbExitReturnBreak;
2294 /** Native recompiler: The TB finished executing jumping to the ReturnBreakFF label. */
2295 STAMCOUNTER StatNativeTbExitReturnBreakFF;
2296 /** Native recompiler: The TB finished executing jumping to the ReturnWithFlags label. */
2297 STAMCOUNTER StatNativeTbExitReturnWithFlags;
2298 /** Native recompiler: The TB finished executing with other non-zero status. */
2299 STAMCOUNTER StatNativeTbExitReturnOtherStatus;
2300 /** Native recompiler: The TB finished executing via throw / long jump. */
2301 STAMCOUNTER StatNativeTbExitLongJump;
2302 /** Native recompiler: The TB finished executing jumping to the ReturnBreak
2303 * label, but directly jumped to the next TB, scenario \#1 w/o IRQ checks. */
2304 STAMCOUNTER StatNativeTbExitDirectLinking1NoIrq;
2305 /** Native recompiler: The TB finished executing jumping to the ReturnBreak
2306 * label, but directly jumped to the next TB, scenario \#1 with IRQ checks. */
2307 STAMCOUNTER StatNativeTbExitDirectLinking1Irq;
2308 /** Native recompiler: The TB finished executing jumping to the ReturnBreak
2309     * label, but directly jumped to the next TB, scenario \#2 w/o IRQ checks. */
2310 STAMCOUNTER StatNativeTbExitDirectLinking2NoIrq;
2311 /** Native recompiler: The TB finished executing jumping to the ReturnBreak
2312 * label, but directly jumped to the next TB, scenario \#2 with IRQ checks. */
2313 STAMCOUNTER StatNativeTbExitDirectLinking2Irq;
2314
2315 /** Native recompiler: The TB finished executing jumping to the RaiseDe label. */
2316 STAMCOUNTER StatNativeTbExitRaiseDe;
2317 /** Native recompiler: The TB finished executing jumping to the RaiseUd label. */
2318 STAMCOUNTER StatNativeTbExitRaiseUd;
2319 /** Native recompiler: The TB finished executing jumping to the RaiseSseRelated label. */
2320 STAMCOUNTER StatNativeTbExitRaiseSseRelated;
2321 /** Native recompiler: The TB finished executing jumping to the RaiseAvxRelated label. */
2322 STAMCOUNTER StatNativeTbExitRaiseAvxRelated;
2323 /** Native recompiler: The TB finished executing jumping to the RaiseSseAvxFpRelated label. */
2324 STAMCOUNTER StatNativeTbExitRaiseSseAvxFpRelated;
2325 /** Native recompiler: The TB finished executing jumping to the RaiseNm label. */
2326 STAMCOUNTER StatNativeTbExitRaiseNm;
2327 /** Native recompiler: The TB finished executing jumping to the RaiseGp0 label. */
2328 STAMCOUNTER StatNativeTbExitRaiseGp0;
2329 /** Native recompiler: The TB finished executing jumping to the RaiseMf label. */
2330 STAMCOUNTER StatNativeTbExitRaiseMf;
2331 /** Native recompiler: The TB finished executing jumping to the RaiseXf label. */
2332 STAMCOUNTER StatNativeTbExitRaiseXf;
2333 /** Native recompiler: The TB finished executing jumping to the ObsoleteTb label. */
2334 STAMCOUNTER StatNativeTbExitObsoleteTb;
2335
2336 /** Native recompiler: Failure situations with direct linking scenario \#1.
2337     * Counted together with StatNativeTbExitReturnBreak. Not in release builds.
2338 * @{ */
2339 STAMCOUNTER StatNativeTbExitDirectLinking1NoTb;
2340 STAMCOUNTER StatNativeTbExitDirectLinking1MismatchGCPhysPc;
2341 STAMCOUNTER StatNativeTbExitDirectLinking1MismatchFlags;
2342 STAMCOUNTER StatNativeTbExitDirectLinking1PendingIrq;
2343 /** @} */
2344
2345 /** Native recompiler: Failure situations with direct linking scenario \#2.
2346     * Counted together with StatNativeTbExitReturnBreak. Not in release builds.
2347 * @{ */
2348 STAMCOUNTER StatNativeTbExitDirectLinking2NoTb;
2349 STAMCOUNTER StatNativeTbExitDirectLinking2MismatchGCPhysPc;
2350 STAMCOUNTER StatNativeTbExitDirectLinking2MismatchFlags;
2351 STAMCOUNTER StatNativeTbExitDirectLinking2PendingIrq;
2352 /** @} */
2353
2354 /** iemMemMap and iemMemMapJmp statistics.
2355 * @{ */
2356 STAMCOUNTER StatMemMapJmp;
2357 STAMCOUNTER StatMemMapNoJmp;
2358 STAMCOUNTER StatMemBounceBufferCrossPage;
2359 STAMCOUNTER StatMemBounceBufferMapPhys;
2360 /** @} */
2361
2362#ifdef IEM_WITH_TLB_TRACE
2363 uint64_t au64Padding[2];
2364#else
2365 uint64_t au64Padding[4];
2366#endif
2367 /** @} */
2368
2369#ifdef IEM_WITH_TLB_TRACE
2370 /** The end (next) trace entry. */
2371 uint32_t idxTlbTraceEntry;
2372 /** Number of trace entries allocated expressed as a power of two. */
2373 uint32_t cTlbTraceEntriesShift;
2374 /** The trace entries. */
2375 PIEMTLBTRACEENTRY paTlbTraceEntries;
2376#endif
2377
2378 /** Data TLB.
2379 * @remarks Must be 64-byte aligned. */
2380 IEMTLB DataTlb;
2381 /** Instruction TLB.
2382 * @remarks Must be 64-byte aligned. */
2383 IEMTLB CodeTlb;
2384
2385 /** Exception statistics. */
2386 STAMCOUNTER aStatXcpts[32];
2387 /** Interrupt statistics. */
2388 uint32_t aStatInts[256];
2389
2390#if defined(VBOX_WITH_STATISTICS) && !defined(DOXYGEN_RUNNING) && !defined(IEM_WITHOUT_INSTRUCTION_STATS)
2391 /** Instruction statistics for ring-0/raw-mode. */
2392 IEMINSTRSTATS StatsRZ;
2393 /** Instruction statistics for ring-3. */
2394 IEMINSTRSTATS StatsR3;
2395# ifdef VBOX_WITH_IEM_RECOMPILER
2396 /** Statistics per threaded function call.
2397 * Updated by both the threaded and native recompilers. */
2398 uint32_t acThreadedFuncStats[0x6000 /*24576*/];
2399# endif
2400#endif
2401} IEMCPU;
2402AssertCompileMemberOffset(IEMCPU, cActiveMappings, 0x4f);
2403AssertCompileMemberAlignment(IEMCPU, aMemMappings, 16);
2404AssertCompileMemberAlignment(IEMCPU, aMemMappingLocks, 16);
2405AssertCompileMemberAlignment(IEMCPU, aBounceBuffers, 64);
2406AssertCompileMemberAlignment(IEMCPU, DataTlb, 64);
2407AssertCompileMemberAlignment(IEMCPU, CodeTlb, 64);
2408
2409/** Pointer to the per-CPU IEM state. */
2410typedef IEMCPU *PIEMCPU;
2411/** Pointer to the const per-CPU IEM state. */
2412typedef IEMCPU const *PCIEMCPU;
2413
2414/** @def IEMNATIVE_SIMD_FP_CTRL_REG_NOT_MODIFIED
2415 * Value indicating the TB didn't modify the floating point control register.
2416 * @note Neither FPCR nor MXCSR accept this as a valid value (MXCSR is not fully populated,
2417 *       FPCR has the upper 32 bits reserved), so this is safe. */
2418#if defined(IEMNATIVE_WITH_SIMD_FP_NATIVE_EMITTERS) || defined(DOXYGEN_RUNNING)
2419# ifdef RT_ARCH_AMD64
2420# define IEMNATIVE_SIMD_FP_CTRL_REG_NOT_MODIFIED UINT32_MAX
2421# elif defined(RT_ARCH_ARM64)
2422# define IEMNATIVE_SIMD_FP_CTRL_REG_NOT_MODIFIED UINT64_MAX
2423# else
2424# error "Port me"
2425# endif
2426#endif
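
/* Restore sketch (AMD64 flavour; assumes <xmmintrin.h> style intrinsics are
 * available, the real emitters may do this differently): only touch the host
 * MXCSR if the TB actually modified it.
 * @code
 *  if (pVCpu->iem.s.uRegFpCtrl != IEMNATIVE_SIMD_FP_CTRL_REG_NOT_MODIFIED)
 *  {
 *      _mm_setcsr(pVCpu->iem.s.uRegFpCtrl); // ldmxcsr
 *      pVCpu->iem.s.uRegFpCtrl = IEMNATIVE_SIMD_FP_CTRL_REG_NOT_MODIFIED;
 *  }
 * @endcode */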
2427
2428/** @def IEM_GET_CTX
2429 * Gets the guest CPU context for the calling EMT.
2430 * @returns PCPUMCTX
2431 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2432 */
2433#define IEM_GET_CTX(a_pVCpu) (&(a_pVCpu)->cpum.GstCtx)
2434
2435/** @def IEM_CTX_ASSERT
2436 * Asserts that the @a a_fExtrnMbz is present in the CPU context.
2437 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2438 * @param a_fExtrnMbz The mask of CPUMCTX_EXTRN_XXX flags that must be zero.
2439 */
2440#define IEM_CTX_ASSERT(a_pVCpu, a_fExtrnMbz) \
2441 AssertMsg(!((a_pVCpu)->cpum.GstCtx.fExtrn & (a_fExtrnMbz)), \
2442 ("fExtrn=%#RX64 & fExtrnMbz=%#RX64 -> %#RX64\n", \
2443 (a_pVCpu)->cpum.GstCtx.fExtrn, (a_fExtrnMbz), (a_pVCpu)->cpum.GstCtx.fExtrn & (a_fExtrnMbz) ))
2444
2445/** @def IEM_CTX_IMPORT_RET
2446 * Makes sure the CPU context bits given by @a a_fExtrnImport are imported.
2447 *
2448 * Will call CPUMImportGuestStateOnDemand to import the bits as needed.
2449 *
2450 * Returns on import failure.
2451 *
2452 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2453 * @param a_fExtrnImport The mask of CPUMCTX_EXTRN_XXX flags to import.
2454 */
2455#define IEM_CTX_IMPORT_RET(a_pVCpu, a_fExtrnImport) \
2456 do { \
2457 if (!((a_pVCpu)->cpum.GstCtx.fExtrn & (a_fExtrnImport))) \
2458 { /* likely */ } \
2459 else \
2460 { \
2461 int rcCtxImport = CPUMImportGuestStateOnDemand(a_pVCpu, a_fExtrnImport); \
2462 AssertRCReturn(rcCtxImport, rcCtxImport); \
2463 } \
2464 } while (0)
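
/* Typical usage sketch in an interpreter worker (the flag choice is
 * illustrative): make sure CR0 and CR4 are up to date before reading them.
 * @code
 *  IEM_CTX_IMPORT_RET(pVCpu, CPUMCTX_EXTRN_CR0 | CPUMCTX_EXTRN_CR4);
 *  uint64_t const uCr0 = pVCpu->cpum.GstCtx.cr0;
 * @endcode */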
2465
2466/** @def IEM_CTX_IMPORT_NORET
2467 * Makes sure the CPU context bits given by @a a_fExtrnImport are imported.
2468 *
2469 * Will call CPUMImportGuestStateOnDemand to import the bits as needed.
2470 *
2471 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2472 * @param a_fExtrnImport The mask of CPUMCTX_EXTRN_XXX flags to import.
2473 */
2474#define IEM_CTX_IMPORT_NORET(a_pVCpu, a_fExtrnImport) \
2475 do { \
2476 if (!((a_pVCpu)->cpum.GstCtx.fExtrn & (a_fExtrnImport))) \
2477 { /* likely */ } \
2478 else \
2479 { \
2480 int rcCtxImport = CPUMImportGuestStateOnDemand(a_pVCpu, a_fExtrnImport); \
2481 AssertLogRelRC(rcCtxImport); \
2482 } \
2483 } while (0)
2484
2485/** @def IEM_CTX_IMPORT_JMP
2486 * Makes sure the CPU context bits given by @a a_fExtrnImport are imported.
2487 *
2488 * Will call CPUMImportGuestStateOnDemand to import the bits as needed.
2489 *
2490 * Jumps on import failure.
2491 *
2492 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2493 * @param a_fExtrnImport The mask of CPUMCTX_EXTRN_XXX flags to import.
2494 */
2495#define IEM_CTX_IMPORT_JMP(a_pVCpu, a_fExtrnImport) \
2496 do { \
2497 if (!((a_pVCpu)->cpum.GstCtx.fExtrn & (a_fExtrnImport))) \
2498 { /* likely */ } \
2499 else \
2500 { \
2501 int rcCtxImport = CPUMImportGuestStateOnDemand(a_pVCpu, a_fExtrnImport); \
2502            AssertRCStmt(rcCtxImport, IEM_DO_LONGJMP(a_pVCpu, rcCtxImport)); \
2503 } \
2504 } while (0)
2505
2506
2507
2508/** @def IEM_GET_TARGET_CPU
2509 * Gets the current IEMTARGETCPU value.
2510 * @returns IEMTARGETCPU value.
2511 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
2512 */
2513#if IEM_CFG_TARGET_CPU != IEMTARGETCPU_DYNAMIC
2514# define IEM_GET_TARGET_CPU(a_pVCpu) (IEM_CFG_TARGET_CPU)
2515#else
2516# define IEM_GET_TARGET_CPU(a_pVCpu) ((a_pVCpu)->iem.s.uTargetCpu)
2517#endif
2518
2519/** @def IEM_GET_INSTR_LEN
2520 * Gets the instruction length. */
2521#ifdef IEM_WITH_CODE_TLB
2522# define IEM_GET_INSTR_LEN(a_pVCpu) ((a_pVCpu)->iem.s.offInstrNextByte - (uint32_t)(int32_t)(a_pVCpu)->iem.s.offCurInstrStart)
2523#else
2524# define IEM_GET_INSTR_LEN(a_pVCpu) ((a_pVCpu)->iem.s.offOpcode)
2525#endif
2526
2527/** @def IEM_TRY_SETJMP
2528 * Wrapper around setjmp / try, hiding all the ugly differences.
2529 *
2530 * @note Use with extreme care as this is a fragile macro.
2531 * @param a_pVCpu The cross context virtual CPU structure of the calling EMT.
2532 * @param a_rcTarget The variable that should receive the status code in case
2533 * of a longjmp/throw.
2534 */
2535/** @def IEM_TRY_SETJMP_AGAIN
2536 * For when setjmp / try is used again in the same variable scope as a previous
2537 * IEM_TRY_SETJMP invocation.
2538 */
2539/** @def IEM_CATCH_LONGJMP_BEGIN
2540 * Start wrapper for catch / setjmp-else.
2541 *
2542 * This will set up a scope.
2543 *
2544 * @note Use with extreme care as this is a fragile macro.
2545 * @param a_pVCpu The cross context virtual CPU structure of the calling EMT.
2546 * @param a_rcTarget The variable that should receive the status code in case
2547 * of a longjmp/throw.
2548 */
2549/** @def IEM_CATCH_LONGJMP_END
2550 * End wrapper for catch / setjmp-else.
2551 *
2552 * This will close the scope set up by IEM_CATCH_LONGJMP_BEGIN and clean up the
2553 * state.
2554 *
2555 * @note Use with extreme care as this is a fragile macro.
2556 * @param a_pVCpu The cross context virtual CPU structure of the calling EMT.
2557 */
2558#if defined(IEM_WITH_SETJMP) || defined(DOXYGEN_RUNNING)
2559# ifdef IEM_WITH_THROW_CATCH
2560# define IEM_TRY_SETJMP(a_pVCpu, a_rcTarget) \
2561 a_rcTarget = VINF_SUCCESS; \
2562 try
2563# define IEM_TRY_SETJMP_AGAIN(a_pVCpu, a_rcTarget) \
2564 IEM_TRY_SETJMP(a_pVCpu, a_rcTarget)
2565# define IEM_CATCH_LONGJMP_BEGIN(a_pVCpu, a_rcTarget) \
2566 catch (int rcThrown) \
2567 { \
2568 a_rcTarget = rcThrown
2569# define IEM_CATCH_LONGJMP_END(a_pVCpu) \
2570 } \
2571 ((void)0)
2572# else /* !IEM_WITH_THROW_CATCH */
2573# define IEM_TRY_SETJMP(a_pVCpu, a_rcTarget) \
2574 jmp_buf JmpBuf; \
2575 jmp_buf * volatile pSavedJmpBuf = (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf); \
2576 (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf; \
2577    if ((a_rcTarget = setjmp(JmpBuf)) == 0)
2578# define IEM_TRY_SETJMP_AGAIN(a_pVCpu, a_rcTarget) \
2579 pSavedJmpBuf = (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf); \
2580 (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf) = &JmpBuf; \
2581    if ((a_rcTarget = setjmp(JmpBuf)) == 0)
2582# define IEM_CATCH_LONGJMP_BEGIN(a_pVCpu, a_rcTarget) \
2583 else \
2584 { \
2585 ((void)0)
2586# define IEM_CATCH_LONGJMP_END(a_pVCpu) \
2587 } \
2588 (a_pVCpu)->iem.s.CTX_SUFF(pJmpBuf) = pSavedJmpBuf
2589# endif /* !IEM_WITH_THROW_CATCH */
2590#endif /* IEM_WITH_SETJMP */
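
/* Usage sketch (mirrors the intended pattern; rcLongJmp is an illustrative
 * variable name):
 * @code
 *  int rcLongJmp = VINF_SUCCESS;
 *  IEM_TRY_SETJMP(pVCpu, rcLongJmp)
 *  {
 *      // code that may IEM_DO_LONGJMP() or throw
 *  }
 *  IEM_CATCH_LONGJMP_BEGIN(pVCpu, rcLongJmp);
 *      // rcLongJmp holds the status propagated by the longjmp/throw
 *  IEM_CATCH_LONGJMP_END(pVCpu);
 * @endcode */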
2591
2592
2593/**
2594 * Shared per-VM IEM data.
2595 */
2596typedef struct IEM
2597{
2598 /** The VMX APIC-access page handler type. */
2599 PGMPHYSHANDLERTYPE hVmxApicAccessPage;
2600#ifndef VBOX_WITHOUT_CPUID_HOST_CALL
2601 /** Set if the CPUID host call functionality is enabled. */
2602 bool fCpuIdHostCall;
2603#endif
2604} IEM;
2605
2606
2607
2608/** @name IEM_ACCESS_XXX - Access details.
2609 * @{ */
2610#define IEM_ACCESS_INVALID UINT32_C(0x000000ff)
2611#define IEM_ACCESS_TYPE_READ UINT32_C(0x00000001)
2612#define IEM_ACCESS_TYPE_WRITE UINT32_C(0x00000002)
2613#define IEM_ACCESS_TYPE_EXEC UINT32_C(0x00000004)
2614#define IEM_ACCESS_TYPE_MASK UINT32_C(0x00000007)
2615#define IEM_ACCESS_WHAT_CODE UINT32_C(0x00000010)
2616#define IEM_ACCESS_WHAT_DATA UINT32_C(0x00000020)
2617#define IEM_ACCESS_WHAT_STACK UINT32_C(0x00000030)
2618#define IEM_ACCESS_WHAT_SYS UINT32_C(0x00000040)
2619#define IEM_ACCESS_WHAT_MASK UINT32_C(0x00000070)
2620/** The writes are partial, so we initialize the bounce buffer with the
2621 * original RAM content. */
2622#define IEM_ACCESS_PARTIAL_WRITE UINT32_C(0x00000100)
2623/** Used in aMemMappings to indicate that the entry is bounce buffered. */
2624#define IEM_ACCESS_BOUNCE_BUFFERED UINT32_C(0x00000200)
2625/** Bounce buffer with ring-3 write pending, first page. */
2626#define IEM_ACCESS_PENDING_R3_WRITE_1ST UINT32_C(0x00000400)
2627/** Bounce buffer with ring-3 write pending, second page. */
2628#define IEM_ACCESS_PENDING_R3_WRITE_2ND UINT32_C(0x00000800)
2629/** Not locked, accessed via the TLB. */
2630#define IEM_ACCESS_NOT_LOCKED UINT32_C(0x00001000)
2631/** Atomic access.
2632 * This enables special alignment checks and the VINF_EM_EMULATE_SPLIT_LOCK
2633 * fallback for misaligned stuff. See @bugref{10547}. */
2634#define IEM_ACCESS_ATOMIC UINT32_C(0x00002000)
2635/** Valid bit mask. */
2636#define IEM_ACCESS_VALID_MASK UINT32_C(0x00003fff)
2637/** Shift count for the TLB flags (upper word). */
2638#define IEM_ACCESS_SHIFT_TLB_FLAGS 16
2639
2640/** Atomic read+write data alias. */
2641#define IEM_ACCESS_DATA_ATOMIC (IEM_ACCESS_TYPE_READ | IEM_ACCESS_TYPE_WRITE | IEM_ACCESS_WHAT_DATA | IEM_ACCESS_ATOMIC)
2642/** Read+write data alias. */
2643#define IEM_ACCESS_DATA_RW (IEM_ACCESS_TYPE_READ | IEM_ACCESS_TYPE_WRITE | IEM_ACCESS_WHAT_DATA)
2644/** Write data alias. */
2645#define IEM_ACCESS_DATA_W (IEM_ACCESS_TYPE_WRITE | IEM_ACCESS_WHAT_DATA)
2646/** Read data alias. */
2647#define IEM_ACCESS_DATA_R (IEM_ACCESS_TYPE_READ | IEM_ACCESS_WHAT_DATA)
2648/** Instruction fetch alias. */
2649#define IEM_ACCESS_INSTRUCTION (IEM_ACCESS_TYPE_EXEC | IEM_ACCESS_WHAT_CODE)
2650/** Stack write alias. */
2651#define IEM_ACCESS_STACK_W (IEM_ACCESS_TYPE_WRITE | IEM_ACCESS_WHAT_STACK)
2652/** Stack read alias. */
2653#define IEM_ACCESS_STACK_R (IEM_ACCESS_TYPE_READ | IEM_ACCESS_WHAT_STACK)
2654/** Stack read+write alias. */
2655#define IEM_ACCESS_STACK_RW (IEM_ACCESS_TYPE_READ | IEM_ACCESS_TYPE_WRITE | IEM_ACCESS_WHAT_STACK)
2656/** Read system table alias. */
2657#define IEM_ACCESS_SYS_R (IEM_ACCESS_TYPE_READ | IEM_ACCESS_WHAT_SYS)
2658/** Read+write system table alias. */
2659#define IEM_ACCESS_SYS_RW (IEM_ACCESS_TYPE_READ | IEM_ACCESS_TYPE_WRITE | IEM_ACCESS_WHAT_SYS)
2660/** @} */
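/* Illustrative sketch of decomposing an access dword: the TYPE bits are
 * individual flags, while the WHAT nibble is a field that must be masked and
 * compared, since IEM_ACCESS_WHAT_STACK (0x30) overlaps the CODE and DATA
 * values and cannot be tested bit-by-bit.
 * @code
 *  uint32_t const fAccess  = IEM_ACCESS_STACK_RW | IEM_ACCESS_PARTIAL_WRITE; // example value
 *  bool const     fIsWrite = RT_BOOL(fAccess & IEM_ACCESS_TYPE_WRITE);
 *  bool const     fIsStack = (fAccess & IEM_ACCESS_WHAT_MASK) == IEM_ACCESS_WHAT_STACK;
 * @endcode */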
2661
2662/** @name Prefix constants (IEMCPU::fPrefixes)
2663 * @{ */
2664#define IEM_OP_PRF_SEG_CS RT_BIT_32(0) /**< CS segment prefix (0x2e). */
2665#define IEM_OP_PRF_SEG_SS RT_BIT_32(1) /**< SS segment prefix (0x36). */
2666#define IEM_OP_PRF_SEG_DS RT_BIT_32(2) /**< DS segment prefix (0x3e). */
2667#define IEM_OP_PRF_SEG_ES RT_BIT_32(3) /**< ES segment prefix (0x26). */
2668#define IEM_OP_PRF_SEG_FS RT_BIT_32(4) /**< FS segment prefix (0x64). */
2669#define IEM_OP_PRF_SEG_GS RT_BIT_32(5) /**< GS segment prefix (0x65). */
2670#define IEM_OP_PRF_SEG_MASK UINT32_C(0x3f)
2671
2672#define IEM_OP_PRF_SIZE_OP RT_BIT_32(8) /**< Operand size prefix (0x66). */
2673#define IEM_OP_PRF_SIZE_REX_W RT_BIT_32(9) /**< REX.W prefix (0x48-0x4f). */
2674#define IEM_OP_PRF_SIZE_ADDR RT_BIT_32(10) /**< Address size prefix (0x67). */
2675
2676#define IEM_OP_PRF_LOCK RT_BIT_32(16) /**< Lock prefix (0xf0). */
2677#define IEM_OP_PRF_REPNZ RT_BIT_32(17) /**< Repeat-not-zero prefix (0xf2). */
2678#define IEM_OP_PRF_REPZ RT_BIT_32(18) /**< Repeat-if-zero prefix (0xf3). */
2679
2680#define IEM_OP_PRF_REX RT_BIT_32(24) /**< Any REX prefix (0x40-0x4f). */
2681#define IEM_OP_PRF_REX_B RT_BIT_32(25) /**< REX.B prefix (0x41,0x43,0x45,0x47,0x49,0x4b,0x4d,0x4f). */
2682#define IEM_OP_PRF_REX_X RT_BIT_32(26) /**< REX.X prefix (0x42,0x43,0x46,0x47,0x4a,0x4b,0x4e,0x4f). */
2683#define IEM_OP_PRF_REX_R RT_BIT_32(27) /**< REX.R prefix (0x44,0x45,0x46,0x47,0x4c,0x4d,0x4e,0x4f). */
2684/** Mask with all the REX prefix flags.
2685 * This is generally for use when needing to undo the REX prefixes when they
2686 * are followed by legacy prefixes and therefore do not immediately precede
2687 * the first opcode byte.
2688 * For testing whether any REX prefix is present, use IEM_OP_PRF_REX instead. */
2689#define IEM_OP_PRF_REX_MASK (IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W )
2690
2691#define IEM_OP_PRF_VEX RT_BIT_32(28) /**< Indicates VEX prefix. */
2692#define IEM_OP_PRF_EVEX RT_BIT_32(29) /**< Indicates EVEX prefix. */
2693#define IEM_OP_PRF_XOP RT_BIT_32(30) /**< Indicates XOP prefix. */
2694/** @} */
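/* Simplified decoder sketch (iemExamplePickUpPrefixes is a hypothetical helper
 * covering only a handful of prefixes) showing how the bits above accumulate.
 * A real decoder must also drop the REX bits again (IEM_OP_PRF_REX_MASK)
 * whenever a legacy prefix follows a REX prefix.
 * @code
 *  static uint32_t iemExamplePickUpPrefixes(uint8_t const *pbCode, size_t *poffOpcode)
 *  {
 *      uint32_t fPrefixes = 0;
 *      size_t   off       = 0;
 *      for (;; off++)
 *      {
 *          uint8_t const b = pbCode[off];
 *          if      (b == 0xf0) fPrefixes |= IEM_OP_PRF_LOCK;
 *          else if (b == 0x66) fPrefixes |= IEM_OP_PRF_SIZE_OP;
 *          else if (b == 0x67) fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
 *          else if ((b & 0xf0) == 0x40)    // REX prefix, 64-bit mode only
 *              fPrefixes |= IEM_OP_PRF_REX
 *                        |  (b & 8 ? IEM_OP_PRF_SIZE_REX_W : 0)
 *                        |  (b & 4 ? IEM_OP_PRF_REX_R      : 0)
 *                        |  (b & 2 ? IEM_OP_PRF_REX_X      : 0)
 *                        |  (b & 1 ? IEM_OP_PRF_REX_B      : 0);
 *          else
 *              break;                      // first opcode byte reached
 *      }
 *      *poffOpcode = off;
 *      return fPrefixes;
 *  }
 * @endcode */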
2695
2696/** @name IEMOPFORM_XXX - Opcode forms
2697 * @note These are ORed together with IEMOPHINT_XXX.
2698 * @{ */
2699/** ModR/M: reg, r/m */
2700#define IEMOPFORM_RM 0
2701/** ModR/M: reg, r/m (register) */
2702#define IEMOPFORM_RM_REG (IEMOPFORM_RM | IEMOPFORM_MOD3)
2703/** ModR/M: reg, r/m (memory) */
2704#define IEMOPFORM_RM_MEM (IEMOPFORM_RM | IEMOPFORM_NOT_MOD3)
2705/** ModR/M: reg, r/m, imm */
2706#define IEMOPFORM_RMI 1
2707/** ModR/M: reg, r/m (register), imm */
2708#define IEMOPFORM_RMI_REG (IEMOPFORM_RMI | IEMOPFORM_MOD3)
2709/** ModR/M: reg, r/m (memory), imm */
2710#define IEMOPFORM_RMI_MEM (IEMOPFORM_RMI | IEMOPFORM_NOT_MOD3)
2711/** ModR/M: reg, r/m, xmm0 */
2712#define IEMOPFORM_RM0 2
2713/** ModR/M: reg, r/m (register), xmm0 */
2714#define IEMOPFORM_RM0_REG (IEMOPFORM_RM0 | IEMOPFORM_MOD3)
2715/** ModR/M: reg, r/m (memory), xmm0 */
2716#define IEMOPFORM_RM0_MEM (IEMOPFORM_RM0 | IEMOPFORM_NOT_MOD3)
2717/** ModR/M: r/m, reg */
2718#define IEMOPFORM_MR 3
2719/** ModR/M: r/m (register), reg */
2720#define IEMOPFORM_MR_REG (IEMOPFORM_MR | IEMOPFORM_MOD3)
2721/** ModR/M: r/m (memory), reg */
2722#define IEMOPFORM_MR_MEM (IEMOPFORM_MR | IEMOPFORM_NOT_MOD3)
2723/** ModR/M: r/m, reg, imm */
2724#define IEMOPFORM_MRI 4
2725/** ModR/M: r/m (register), reg, imm */
2726#define IEMOPFORM_MRI_REG (IEMOPFORM_MRI | IEMOPFORM_MOD3)
2727/** ModR/M: r/m (memory), reg, imm */
2728#define IEMOPFORM_MRI_MEM (IEMOPFORM_MRI | IEMOPFORM_NOT_MOD3)
2729/** ModR/M: r/m only */
2730#define IEMOPFORM_M 5
2731/** ModR/M: r/m only (register). */
2732#define IEMOPFORM_M_REG (IEMOPFORM_M | IEMOPFORM_MOD3)
2733/** ModR/M: r/m only (memory). */
2734#define IEMOPFORM_M_MEM (IEMOPFORM_M | IEMOPFORM_NOT_MOD3)
2735/** ModR/M: r/m, imm */
2736#define IEMOPFORM_MI 6
2737/** ModR/M: r/m (register), imm */
2738#define IEMOPFORM_MI_REG (IEMOPFORM_MI | IEMOPFORM_MOD3)
2739/** ModR/M: r/m (memory), imm */
2740#define IEMOPFORM_MI_MEM (IEMOPFORM_MI | IEMOPFORM_NOT_MOD3)
2741/** ModR/M: r/m, 1 (shift and rotate instructions) */
2742#define IEMOPFORM_M1 7
2743/** ModR/M: r/m (register), 1. */
2744#define IEMOPFORM_M1_REG (IEMOPFORM_M1 | IEMOPFORM_MOD3)
2745/** ModR/M: r/m (memory), 1. */
2746#define IEMOPFORM_M1_MEM (IEMOPFORM_M1 | IEMOPFORM_NOT_MOD3)
2747/** ModR/M: r/m, CL (shift and rotate instructions)
2748 * @todo This should just've been a generic fixed register. But the python
2749 * code needs more convincing. */
2750#define IEMOPFORM_M_CL 8
2751/** ModR/M: r/m (register), CL. */
2752#define IEMOPFORM_M_CL_REG (IEMOPFORM_M_CL | IEMOPFORM_MOD3)
2753/** ModR/M: r/m (memory), CL. */
2754#define IEMOPFORM_M_CL_MEM (IEMOPFORM_M_CL | IEMOPFORM_NOT_MOD3)
2755/** ModR/M: reg only */
2756#define IEMOPFORM_R 9
2757
2758/** VEX+ModR/M: reg, r/m */
2759#define IEMOPFORM_VEX_RM 16
2760/** VEX+ModR/M: reg, r/m (register) */
2761#define IEMOPFORM_VEX_RM_REG (IEMOPFORM_VEX_RM | IEMOPFORM_MOD3)
2762/** VEX+ModR/M: reg, r/m (memory) */
2763#define IEMOPFORM_VEX_RM_MEM (IEMOPFORM_VEX_RM | IEMOPFORM_NOT_MOD3)
2764/** VEX+ModR/M: r/m, reg */
2765#define IEMOPFORM_VEX_MR 17
2766/** VEX+ModR/M: r/m (register), reg */
2767#define IEMOPFORM_VEX_MR_REG (IEMOPFORM_VEX_MR | IEMOPFORM_MOD3)
2768/** VEX+ModR/M: r/m (memory), reg */
2769#define IEMOPFORM_VEX_MR_MEM (IEMOPFORM_VEX_MR | IEMOPFORM_NOT_MOD3)
2770/** VEX+ModR/M: r/m, reg, imm8 */
2771#define IEMOPFORM_VEX_MRI 18
2772/** VEX+ModR/M: r/m (register), reg, imm8 */
2773#define IEMOPFORM_VEX_MRI_REG (IEMOPFORM_VEX_MRI | IEMOPFORM_MOD3)
2774/** VEX+ModR/M: r/m (memory), reg, imm8 */
2775#define IEMOPFORM_VEX_MRI_MEM (IEMOPFORM_VEX_MRI | IEMOPFORM_NOT_MOD3)
2776/** VEX+ModR/M: r/m only */
2777#define IEMOPFORM_VEX_M 19
2778/** VEX+ModR/M: r/m only (register). */
2779#define IEMOPFORM_VEX_M_REG (IEMOPFORM_VEX_M | IEMOPFORM_MOD3)
2780/** VEX+ModR/M: r/m only (memory). */
2781#define IEMOPFORM_VEX_M_MEM (IEMOPFORM_VEX_M | IEMOPFORM_NOT_MOD3)
2782/** VEX+ModR/M: reg only */
2783#define IEMOPFORM_VEX_R 20
2784/** VEX+ModR/M: reg, vvvv, r/m */
2785#define IEMOPFORM_VEX_RVM 21
2786/** VEX+ModR/M: reg, vvvv, r/m (register). */
2787#define IEMOPFORM_VEX_RVM_REG (IEMOPFORM_VEX_RVM | IEMOPFORM_MOD3)
2788/** VEX+ModR/M: reg, vvvv, r/m (memory). */
2789#define IEMOPFORM_VEX_RVM_MEM (IEMOPFORM_VEX_RVM | IEMOPFORM_NOT_MOD3)
2790/** VEX+ModR/M: reg, vvvv, r/m, imm */
2791#define IEMOPFORM_VEX_RVMI 22
2792/** VEX+ModR/M: reg, vvvv, r/m (register), imm. */
2793#define IEMOPFORM_VEX_RVMI_REG (IEMOPFORM_VEX_RVMI | IEMOPFORM_MOD3)
2794/** VEX+ModR/M: reg, vvvv, r/m (memory), imm. */
2795#define IEMOPFORM_VEX_RVMI_MEM (IEMOPFORM_VEX_RVMI | IEMOPFORM_NOT_MOD3)
2796/** VEX+ModR/M: reg, vvvv, r/m, imm(reg) */
2797#define IEMOPFORM_VEX_RVMR 23
2798/** VEX+ModR/M: reg, vvvv, r/m (register), imm(reg). */
2799#define IEMOPFORM_VEX_RVMR_REG (IEMOPFORM_VEX_RVMR | IEMOPFORM_MOD3)
2800/** VEX+ModR/M: reg, vvvv, r/m (memory), imm(reg). */
2801#define IEMOPFORM_VEX_RVMR_MEM (IEMOPFORM_VEX_RVMR | IEMOPFORM_NOT_MOD3)
2802/** VEX+ModR/M: reg, r/m, vvvv */
2803#define IEMOPFORM_VEX_RMV 24
2804/** VEX+ModR/M: reg, r/m, vvvv (register). */
2805#define IEMOPFORM_VEX_RMV_REG (IEMOPFORM_VEX_RMV | IEMOPFORM_MOD3)
2806/** VEX+ModR/M: reg, r/m, vvvv (memory). */
2807#define IEMOPFORM_VEX_RMV_MEM (IEMOPFORM_VEX_RMV | IEMOPFORM_NOT_MOD3)
2808/** VEX+ModR/M: reg, r/m, imm8 */
2809#define IEMOPFORM_VEX_RMI 25
2810/** VEX+ModR/M: reg, r/m, imm8 (register). */
2811#define IEMOPFORM_VEX_RMI_REG (IEMOPFORM_VEX_RMI | IEMOPFORM_MOD3)
2812/** VEX+ModR/M: reg, r/m, imm8 (memory). */
2813#define IEMOPFORM_VEX_RMI_MEM (IEMOPFORM_VEX_RMI | IEMOPFORM_NOT_MOD3)
2814/** VEX+ModR/M: r/m, vvvv, reg */
2815#define IEMOPFORM_VEX_MVR 26
2816/** VEX+ModR/M: r/m, vvvv, reg (register) */
2817#define IEMOPFORM_VEX_MVR_REG (IEMOPFORM_VEX_MVR | IEMOPFORM_MOD3)
2818/** VEX+ModR/M: r/m, vvvv, reg (memory) */
2819#define IEMOPFORM_VEX_MVR_MEM (IEMOPFORM_VEX_MVR | IEMOPFORM_NOT_MOD3)
2820/** VEX+ModR/M+/n: vvvv, r/m */
2821#define IEMOPFORM_VEX_VM 27
2822/** VEX+ModR/M+/n: vvvv, r/m (register) */
2823#define IEMOPFORM_VEX_VM_REG (IEMOPFORM_VEX_VM | IEMOPFORM_MOD3)
2824/** VEX+ModR/M+/n: vvvv, r/m (memory) */
2825#define IEMOPFORM_VEX_VM_MEM (IEMOPFORM_VEX_VM | IEMOPFORM_NOT_MOD3)
2826/** VEX+ModR/M+/n: vvvv, r/m, imm8 */
2827#define IEMOPFORM_VEX_VMI 28
2828/** VEX+ModR/M+/n: vvvv, r/m, imm8 (register) */
2829#define IEMOPFORM_VEX_VMI_REG (IEMOPFORM_VEX_VMI | IEMOPFORM_MOD3)
2830/** VEX+ModR/M+/n: vvvv, r/m, imm8 (memory) */
2831#define IEMOPFORM_VEX_VMI_MEM (IEMOPFORM_VEX_VMI | IEMOPFORM_NOT_MOD3)
2832
2833/** Fixed register instruction, no R/M. */
2834#define IEMOPFORM_FIXED 32
2835
2836/** The r/m is a register. */
2837#define IEMOPFORM_MOD3 RT_BIT_32(8)
2838/** The r/m is a memory access. */
2839#define IEMOPFORM_NOT_MOD3 RT_BIT_32(9)
2840/** @} */
2841
2842/** @name IEMOPHINT_XXX - Additional Opcode Hints
2843 * @note These are ORed together with IEMOPFORM_XXX.
2844 * @{ */
2845/** Ignores the operand size prefix (66h). */
2846#define IEMOPHINT_IGNORES_OZ_PFX RT_BIT_32(10)
2847/** Ignores REX.W (aka WIG). */
2848#define IEMOPHINT_IGNORES_REXW RT_BIT_32(11)
2849/** Both the operand size prefixes (66h + REX.W) are ignored. */
2850#define IEMOPHINT_IGNORES_OP_SIZES (IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW)
2851/** Allowed with the lock prefix. */
2852#define IEMOPHINT_LOCK_ALLOWED RT_BIT_32(18)
2853/** The VEX.L value is ignored (aka LIG). */
2854#define IEMOPHINT_VEX_L_IGNORED RT_BIT_32(12)
2855/** The VEX.L value must be zero (i.e. 128-bit width only). */
2856#define IEMOPHINT_VEX_L_ZERO RT_BIT_32(13)
2857/** The VEX.L value must be one (i.e. 256-bit width only). */
2858#define IEMOPHINT_VEX_L_ONE RT_BIT_32(14)
2859/** The VEX.V value must be zero. */
2860#define IEMOPHINT_VEX_V_ZERO RT_BIT_32(15)
2861/** The REX.W/VEX.W value must be zero. */
2862#define IEMOPHINT_REX_W_ZERO RT_BIT_32(16)
2863#define IEMOPHINT_VEX_W_ZERO IEMOPHINT_REX_W_ZERO
2864/** The REX.W/VEX.W value must be one. */
2865#define IEMOPHINT_REX_W_ONE RT_BIT_32(17)
2866#define IEMOPHINT_VEX_W_ONE IEMOPHINT_REX_W_ONE
2867
2868/** Hint to IEMAllInstructionPython.py that this macro should be skipped. */
2869#define IEMOPHINT_SKIP_PYTHON RT_BIT_32(31)
2870/** @} */
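/* Sketch of how a combined form/hint value decomposes (hypothetical table
 * entry, illustrative only): the form index lives in the low byte,
 * IEMOPFORM_MOD3/NOT_MOD3 in bits 8/9, and the hints above that.
 * @code
 *  uint32_t const fForm     = IEMOPFORM_VEX_RVM_REG | IEMOPHINT_VEX_L_ZERO;
 *  uint32_t const idxForm   = fForm & 0xff;                 // == IEMOPFORM_VEX_RVM
 *  bool const     fRegForm  = RT_BOOL(fForm & IEMOPFORM_MOD3);
 *  bool const     fL128Only = RT_BOOL(fForm & IEMOPHINT_VEX_L_ZERO);
 * @endcode */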
2871
2872/**
2873 * Possible hardware task switch sources.
2874 */
2875typedef enum IEMTASKSWITCH
2876{
2877 /** Task switch caused by an interrupt/exception. */
2878 IEMTASKSWITCH_INT_XCPT = 1,
2879 /** Task switch caused by a far CALL. */
2880 IEMTASKSWITCH_CALL,
2881 /** Task switch caused by a far JMP. */
2882 IEMTASKSWITCH_JUMP,
2883 /** Task switch caused by an IRET. */
2884 IEMTASKSWITCH_IRET
2885} IEMTASKSWITCH;
2886AssertCompileSize(IEMTASKSWITCH, 4);
2887
2888/**
2889 * Possible CrX load (write) sources.
2890 */
2891typedef enum IEMACCESSCRX
2892{
2893 /** CrX access caused by 'mov crX' instruction. */
2894 IEMACCESSCRX_MOV_CRX,
2895 /** CrX (CR0) write caused by 'lmsw' instruction. */
2896 IEMACCESSCRX_LMSW,
2897 /** CrX (CR0) write caused by 'clts' instruction. */
2898 IEMACCESSCRX_CLTS,
2899 /** CrX (CR0) read caused by 'smsw' instruction. */
2900 IEMACCESSCRX_SMSW
2901} IEMACCESSCRX;
2902
2903#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
2904/** @name IEM_SLAT_FAIL_XXX - Second-level address translation failure information.
2905 *
2906 * These flags provide further context to SLAT page-walk failures that could not be
2907 * determined by PGM (e.g. PGM is not privy to memory access permissions).
2908 *
2909 * @{
2910 */
2911/** Translating a nested-guest linear address failed accessing a nested-guest
2912 * physical address. */
2913# define IEM_SLAT_FAIL_LINEAR_TO_PHYS_ADDR RT_BIT_32(0)
2914/** Translating a nested-guest linear address failed accessing a
2915 * paging-structure entry or updating accessed/dirty bits. */
2916# define IEM_SLAT_FAIL_LINEAR_TO_PAGE_TABLE RT_BIT_32(1)
2917/** @} */
2918
2919DECLCALLBACK(FNPGMPHYSHANDLER) iemVmxApicAccessPageHandler;
2920# ifndef IN_RING3
2921DECLCALLBACK(FNPGMRZPHYSPFHANDLER) iemVmxApicAccessPagePfHandler;
2922# endif
2923#endif
2924
2925/**
2926 * Indicates to the verifier that the given flag set is undefined.
2927 *
2928 * Can be invoked again to add more flags.
2929 *
2930 * This is a NOOP if the verifier isn't compiled in.
2931 *
2932 * @note We're temporarily keeping this until code is converted to new
2933 * disassembler style opcode handling.
2934 */
2935#define IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fEfl) do { } while (0)
2936
2937
2938/** @def IEM_DECL_IMPL_TYPE
2939 * For typedef'ing an instruction implementation function.
2940 *
2941 * @param a_RetType The return type.
2942 * @param a_Name The name of the type.
2943 * @param a_ArgList The argument list enclosed in parentheses.
2944 */
2945
2946/** @def IEM_DECL_IMPL_DEF
2947 * For defining an instruction implementation function.
2948 *
2949 * @param a_RetType The return type.
2950 * @param a_Name The name of the function.
2951 * @param a_ArgList The argument list enclosed in parentheses.
2952 */
2953
2954#if defined(__GNUC__) && defined(RT_ARCH_X86)
2955# define IEM_DECL_IMPL_TYPE(a_RetType, a_Name, a_ArgList) \
2956 __attribute__((__fastcall__)) a_RetType (a_Name) a_ArgList
2957# define IEM_DECL_IMPL_DEF(a_RetType, a_Name, a_ArgList) \
2958 __attribute__((__fastcall__, __nothrow__)) DECL_HIDDEN_ONLY(a_RetType) a_Name a_ArgList
2959# define IEM_DECL_IMPL_PROTO(a_RetType, a_Name, a_ArgList) \
2960 __attribute__((__fastcall__, __nothrow__)) DECL_HIDDEN_ONLY(a_RetType) a_Name a_ArgList
2961
2962#elif defined(_MSC_VER) && defined(RT_ARCH_X86)
2963# define IEM_DECL_IMPL_TYPE(a_RetType, a_Name, a_ArgList) \
2964 a_RetType (__fastcall a_Name) a_ArgList
2965# define IEM_DECL_IMPL_DEF(a_RetType, a_Name, a_ArgList) \
2966 a_RetType __fastcall a_Name a_ArgList RT_NOEXCEPT
2967# define IEM_DECL_IMPL_PROTO(a_RetType, a_Name, a_ArgList) \
2968 a_RetType __fastcall a_Name a_ArgList RT_NOEXCEPT
2969
2970#elif __cplusplus >= 201700 /* P0012R1 support */
2971# define IEM_DECL_IMPL_TYPE(a_RetType, a_Name, a_ArgList) \
2972 a_RetType (VBOXCALL a_Name) a_ArgList RT_NOEXCEPT
2973# define IEM_DECL_IMPL_DEF(a_RetType, a_Name, a_ArgList) \
2974 DECL_HIDDEN_ONLY(a_RetType) VBOXCALL a_Name a_ArgList RT_NOEXCEPT
2975# define IEM_DECL_IMPL_PROTO(a_RetType, a_Name, a_ArgList) \
2976 DECL_HIDDEN_ONLY(a_RetType) VBOXCALL a_Name a_ArgList RT_NOEXCEPT
2977
2978#else
2979# define IEM_DECL_IMPL_TYPE(a_RetType, a_Name, a_ArgList) \
2980 a_RetType (VBOXCALL a_Name) a_ArgList
2981# define IEM_DECL_IMPL_DEF(a_RetType, a_Name, a_ArgList) \
2982 DECL_HIDDEN_ONLY(a_RetType) VBOXCALL a_Name a_ArgList
2983# define IEM_DECL_IMPL_PROTO(a_RetType, a_Name, a_ArgList) \
2984 DECL_HIDDEN_ONLY(a_RetType) VBOXCALL a_Name a_ArgList
2985
2986#endif
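/* Illustrative expansion sketch (iemAImpl_example_u8 is a hypothetical worker):
 * IEM_DECL_IMPL_PROTO declares and IEM_DECL_IMPL_DEF defines a worker with the
 * calling convention and nothrow/hidden attributes selected above for the
 * current compiler and architecture.
 * @code
 *  IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_example_u8,(uint32_t fEFlagsIn, uint8_t *pu8Dst, uint8_t u8Src));
 *  IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_example_u8,(uint32_t fEFlagsIn, uint8_t *pu8Dst, uint8_t u8Src))
 *  {
 *      *pu8Dst ^= u8Src;
 *      return fEFlagsIn; // flags recalculation elided
 *  }
 * @endcode */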
2987
2988/** Defined in IEMAllAImplC.cpp but also used by IEMAllAImplA.asm. */
2989RT_C_DECLS_BEGIN
2990extern uint8_t const g_afParity[256];
2991RT_C_DECLS_END
2992
2993
2994/** @name Arithmetic assignment operations on bytes (binary).
2995 * @{ */
2996typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU8, (uint32_t fEFlagsIn, uint8_t *pu8Dst, uint8_t u8Src));
2997typedef FNIEMAIMPLBINU8 *PFNIEMAIMPLBINU8;
2998FNIEMAIMPLBINU8 iemAImpl_add_u8, iemAImpl_add_u8_locked;
2999FNIEMAIMPLBINU8 iemAImpl_adc_u8, iemAImpl_adc_u8_locked;
3000FNIEMAIMPLBINU8 iemAImpl_sub_u8, iemAImpl_sub_u8_locked;
3001FNIEMAIMPLBINU8 iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked;
3002FNIEMAIMPLBINU8 iemAImpl_or_u8, iemAImpl_or_u8_locked;
3003FNIEMAIMPLBINU8 iemAImpl_xor_u8, iemAImpl_xor_u8_locked;
3004FNIEMAIMPLBINU8 iemAImpl_and_u8, iemAImpl_and_u8_locked;
3005/** @} */
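/* Call sketch: these workers take the current EFLAGS by value and return the
 * updated value (hypothetical invocation, illustrative operands):
 * @code
 *  uint8_t  u8Dst   = 0x0f;
 *  uint32_t fEFlags = X86_EFL_1;                  // reserved bit 1 is always set
 *  fEFlags = iemAImpl_add_u8(fEFlags, &u8Dst, 1); // u8Dst == 0x10; AF set, CF/ZF/SF clear
 * @endcode */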
3006
3007/** @name Arithmetic assignment operations on words (binary).
3008 * @{ */
3009typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU16, (uint32_t fEFlagsIn, uint16_t *pu16Dst, uint16_t u16Src));
3010typedef FNIEMAIMPLBINU16 *PFNIEMAIMPLBINU16;
3011FNIEMAIMPLBINU16 iemAImpl_add_u16, iemAImpl_add_u16_locked;
3012FNIEMAIMPLBINU16 iemAImpl_adc_u16, iemAImpl_adc_u16_locked;
3013FNIEMAIMPLBINU16 iemAImpl_sub_u16, iemAImpl_sub_u16_locked;
3014FNIEMAIMPLBINU16 iemAImpl_sbb_u16, iemAImpl_sbb_u16_locked;
3015FNIEMAIMPLBINU16 iemAImpl_or_u16, iemAImpl_or_u16_locked;
3016FNIEMAIMPLBINU16 iemAImpl_xor_u16, iemAImpl_xor_u16_locked;
3017FNIEMAIMPLBINU16 iemAImpl_and_u16, iemAImpl_and_u16_locked;
3018/** @} */
3019
3020
3021/** @name Arithmetic assignment operations on double words (binary).
3022 * @{ */
3023typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU32, (uint32_t fEFlagsIn, uint32_t *pu32Dst, uint32_t u32Src));
3024typedef FNIEMAIMPLBINU32 *PFNIEMAIMPLBINU32;
3025FNIEMAIMPLBINU32 iemAImpl_add_u32, iemAImpl_add_u32_locked;
3026FNIEMAIMPLBINU32 iemAImpl_adc_u32, iemAImpl_adc_u32_locked;
3027FNIEMAIMPLBINU32 iemAImpl_sub_u32, iemAImpl_sub_u32_locked;
3028FNIEMAIMPLBINU32 iemAImpl_sbb_u32, iemAImpl_sbb_u32_locked;
3029FNIEMAIMPLBINU32 iemAImpl_or_u32, iemAImpl_or_u32_locked;
3030FNIEMAIMPLBINU32 iemAImpl_xor_u32, iemAImpl_xor_u32_locked;
3031FNIEMAIMPLBINU32 iemAImpl_and_u32, iemAImpl_and_u32_locked;
3032FNIEMAIMPLBINU32 iemAImpl_blsi_u32, iemAImpl_blsi_u32_fallback;
3033FNIEMAIMPLBINU32 iemAImpl_blsr_u32, iemAImpl_blsr_u32_fallback;
3034FNIEMAIMPLBINU32 iemAImpl_blsmsk_u32, iemAImpl_blsmsk_u32_fallback;
3035/** @} */
3036
3037/** @name Arithmetic assignment operations on quad words (binary).
3038 * @{ */
3039typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU64, (uint32_t fEFlagsIn, uint64_t *pu64Dst, uint64_t u64Src));
3040typedef FNIEMAIMPLBINU64 *PFNIEMAIMPLBINU64;
3041FNIEMAIMPLBINU64 iemAImpl_add_u64, iemAImpl_add_u64_locked;
3042FNIEMAIMPLBINU64 iemAImpl_adc_u64, iemAImpl_adc_u64_locked;
3043FNIEMAIMPLBINU64 iemAImpl_sub_u64, iemAImpl_sub_u64_locked;
3044FNIEMAIMPLBINU64 iemAImpl_sbb_u64, iemAImpl_sbb_u64_locked;
3045FNIEMAIMPLBINU64 iemAImpl_or_u64, iemAImpl_or_u64_locked;
3046FNIEMAIMPLBINU64 iemAImpl_xor_u64, iemAImpl_xor_u64_locked;
3047FNIEMAIMPLBINU64 iemAImpl_and_u64, iemAImpl_and_u64_locked;
3048FNIEMAIMPLBINU64 iemAImpl_blsi_u64, iemAImpl_blsi_u64_fallback;
3049FNIEMAIMPLBINU64 iemAImpl_blsr_u64, iemAImpl_blsr_u64_fallback;
3050FNIEMAIMPLBINU64 iemAImpl_blsmsk_u64, iemAImpl_blsmsk_u64_fallback;
3051/** @} */
3052
3053typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU8, (uint32_t fEFlagsIn, uint8_t const *pu8Dst, uint8_t u8Src));
3054typedef FNIEMAIMPLBINROU8 *PFNIEMAIMPLBINROU8;
3055typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU16,(uint32_t fEFlagsIn, uint16_t const *pu16Dst, uint16_t u16Src));
3056typedef FNIEMAIMPLBINROU16 *PFNIEMAIMPLBINROU16;
3057typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU32,(uint32_t fEFlagsIn, uint32_t const *pu32Dst, uint32_t u32Src));
3058typedef FNIEMAIMPLBINROU32 *PFNIEMAIMPLBINROU32;
3059typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU64,(uint32_t fEFlagsIn, uint64_t const *pu64Dst, uint64_t u64Src));
3060typedef FNIEMAIMPLBINROU64 *PFNIEMAIMPLBINROU64;
3061
3062/** @name Compare operations (thrown in with the binary ops).
3063 * @{ */
3064FNIEMAIMPLBINROU8 iemAImpl_cmp_u8;
3065FNIEMAIMPLBINROU16 iemAImpl_cmp_u16;
3066FNIEMAIMPLBINROU32 iemAImpl_cmp_u32;
3067FNIEMAIMPLBINROU64 iemAImpl_cmp_u64;
3068/** @} */
3069
3070/** @name Test operations (thrown in with the binary ops).
3071 * @{ */
3072FNIEMAIMPLBINROU8 iemAImpl_test_u8;
3073FNIEMAIMPLBINROU16 iemAImpl_test_u16;
3074FNIEMAIMPLBINROU32 iemAImpl_test_u32;
3075FNIEMAIMPLBINROU64 iemAImpl_test_u64;
3076/** @} */
3077
3078/** @name Bit operations (thrown in with the binary ops).
3079 * @{ */
3080FNIEMAIMPLBINROU16 iemAImpl_bt_u16;
3081FNIEMAIMPLBINROU32 iemAImpl_bt_u32;
3082FNIEMAIMPLBINROU64 iemAImpl_bt_u64;
3083FNIEMAIMPLBINU16 iemAImpl_btc_u16, iemAImpl_btc_u16_locked;
3084FNIEMAIMPLBINU32 iemAImpl_btc_u32, iemAImpl_btc_u32_locked;
3085FNIEMAIMPLBINU64 iemAImpl_btc_u64, iemAImpl_btc_u64_locked;
3086FNIEMAIMPLBINU16 iemAImpl_btr_u16, iemAImpl_btr_u16_locked;
3087FNIEMAIMPLBINU32 iemAImpl_btr_u32, iemAImpl_btr_u32_locked;
3088FNIEMAIMPLBINU64 iemAImpl_btr_u64, iemAImpl_btr_u64_locked;
3089FNIEMAIMPLBINU16 iemAImpl_bts_u16, iemAImpl_bts_u16_locked;
3090FNIEMAIMPLBINU32 iemAImpl_bts_u32, iemAImpl_bts_u32_locked;
3091FNIEMAIMPLBINU64 iemAImpl_bts_u64, iemAImpl_bts_u64_locked;
3092/** @} */
3093
3094/** @name Arithmetic three operand operations on double words (binary).
3095 * @{ */
3096typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU32, (uint32_t *pu32Dst, uint32_t u32Src1, uint32_t u32Src2, uint32_t *pEFlags));
3097typedef FNIEMAIMPLBINVEXU32 *PFNIEMAIMPLBINVEXU32;
3098FNIEMAIMPLBINVEXU32 iemAImpl_andn_u32, iemAImpl_andn_u32_fallback;
3099FNIEMAIMPLBINVEXU32 iemAImpl_bextr_u32, iemAImpl_bextr_u32_fallback;
3100FNIEMAIMPLBINVEXU32 iemAImpl_bzhi_u32, iemAImpl_bzhi_u32_fallback;
3101/** @} */
3102
3103/** @name Arithmetic three operand operations on quad words (binary).
3104 * @{ */
3105typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU64, (uint64_t *pu64Dst, uint64_t u64Src1, uint64_t u64Src2, uint32_t *pEFlags));
3106typedef FNIEMAIMPLBINVEXU64 *PFNIEMAIMPLBINVEXU64;
3107FNIEMAIMPLBINVEXU64 iemAImpl_andn_u64, iemAImpl_andn_u64_fallback;
3108FNIEMAIMPLBINVEXU64 iemAImpl_bextr_u64, iemAImpl_bextr_u64_fallback;
3109FNIEMAIMPLBINVEXU64 iemAImpl_bzhi_u64, iemAImpl_bzhi_u64_fallback;
3110/** @} */
3111
3112/** @name Arithmetic three operand operations on double words w/o EFLAGS (binary).
3113 * @{ */
3114typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU32NOEFL, (uint32_t *pu32Dst, uint32_t u32Src1, uint32_t u32Src2));
3115typedef FNIEMAIMPLBINVEXU32NOEFL *PFNIEMAIMPLBINVEXU32NOEFL;
3116FNIEMAIMPLBINVEXU32NOEFL iemAImpl_pdep_u32, iemAImpl_pdep_u32_fallback;
3117FNIEMAIMPLBINVEXU32NOEFL iemAImpl_pext_u32, iemAImpl_pext_u32_fallback;
3118FNIEMAIMPLBINVEXU32NOEFL iemAImpl_sarx_u32, iemAImpl_sarx_u32_fallback;
3119FNIEMAIMPLBINVEXU32NOEFL iemAImpl_shlx_u32, iemAImpl_shlx_u32_fallback;
3120FNIEMAIMPLBINVEXU32NOEFL iemAImpl_shrx_u32, iemAImpl_shrx_u32_fallback;
3121FNIEMAIMPLBINVEXU32NOEFL iemAImpl_rorx_u32;
3122/** @} */
3123
3124/** @name Arithmetic three operand operations on quad words w/o EFLAGS (binary).
3125 * @{ */
3126typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU64NOEFL, (uint64_t *pu64Dst, uint64_t u64Src1, uint64_t u64Src2));
3127typedef FNIEMAIMPLBINVEXU64NOEFL *PFNIEMAIMPLBINVEXU64NOEFL;
3128FNIEMAIMPLBINVEXU64NOEFL iemAImpl_pdep_u64, iemAImpl_pdep_u64_fallback;
3129FNIEMAIMPLBINVEXU64NOEFL iemAImpl_pext_u64, iemAImpl_pext_u64_fallback;
3130FNIEMAIMPLBINVEXU64NOEFL iemAImpl_sarx_u64, iemAImpl_sarx_u64_fallback;
3131FNIEMAIMPLBINVEXU64NOEFL iemAImpl_shlx_u64, iemAImpl_shlx_u64_fallback;
3132FNIEMAIMPLBINVEXU64NOEFL iemAImpl_shrx_u64, iemAImpl_shrx_u64_fallback;
3133FNIEMAIMPLBINVEXU64NOEFL iemAImpl_rorx_u64;
3134/** @} */
3135
3136/** @name MULX 32-bit and 64-bit.
3137 * @{ */
3138typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMULXVEXU32, (uint32_t *puDst1, uint32_t *puDst2, uint32_t uSrc1, uint32_t uSrc2));
3139typedef FNIEMAIMPLMULXVEXU32 *PFNIEMAIMPLMULXVEXU32;
3140FNIEMAIMPLMULXVEXU32 iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback;
3141
3142typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMULXVEXU64, (uint64_t *puDst1, uint64_t *puDst2, uint64_t uSrc1, uint64_t uSrc2));
3143typedef FNIEMAIMPLMULXVEXU64 *PFNIEMAIMPLMULXVEXU64;
3144FNIEMAIMPLMULXVEXU64 iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback;
3145/** @} */
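/* MULX sketch: a full-width unsigned multiply that leaves EFLAGS untouched.
 * Hypothetical invocation, assuming the first destination receives the high
 * half of the product per the instruction's hi:lo operand order:
 * @code
 *  uint64_t uHi = 0, uLo = 0;
 *  iemAImpl_mulx_u64(&uHi, &uLo, RT_BIT_64(63), 4); // 2^63 * 4 = 2^65: uHi == 2, uLo == 0
 * @endcode */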
3146
3147
3148/** @name Exchange memory with register operations.
3149 * @{ */
3150IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u8_locked, (uint8_t *pu8Mem, uint8_t *pu8Reg));
3151IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u16_locked,(uint16_t *pu16Mem, uint16_t *pu16Reg));
3152IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u32_locked,(uint32_t *pu32Mem, uint32_t *pu32Reg));
3153IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u64_locked,(uint64_t *pu64Mem, uint64_t *pu64Reg));
3154IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u8_unlocked, (uint8_t *pu8Mem, uint8_t *pu8Reg));
3155IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u16_unlocked,(uint16_t *pu16Mem, uint16_t *pu16Reg));
3156IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u32_unlocked,(uint32_t *pu32Mem, uint32_t *pu32Reg));
3157IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u64_unlocked,(uint64_t *pu64Mem, uint64_t *pu64Reg));
3158/** @} */
3159
3160/** @name Exchange and add operations.
3161 * @{ */
3162IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u8, (uint8_t *pu8Dst, uint8_t *pu8Reg, uint32_t *pEFlags));
3163IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u16,(uint16_t *pu16Dst, uint16_t *pu16Reg, uint32_t *pEFlags));
3164IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u32,(uint32_t *pu32Dst, uint32_t *pu32Reg, uint32_t *pEFlags));
3165IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u64,(uint64_t *pu64Dst, uint64_t *pu64Reg, uint32_t *pEFlags));
3166IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u8_locked, (uint8_t *pu8Dst, uint8_t *pu8Reg, uint32_t *pEFlags));
3167IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u16_locked,(uint16_t *pu16Dst, uint16_t *pu16Reg, uint32_t *pEFlags));
3168IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u32_locked,(uint32_t *pu32Dst, uint32_t *pu32Reg, uint32_t *pEFlags));
3169IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u64_locked,(uint64_t *pu64Dst, uint64_t *pu64Reg, uint32_t *pEFlags));
3170/** @} */
3171
3172/** @name Compare and exchange.
3173 * @{ */
3174IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u8, (uint8_t *pu8Dst, uint8_t *puAl, uint8_t uSrcReg, uint32_t *pEFlags));
3175IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u8_locked, (uint8_t *pu8Dst, uint8_t *puAl, uint8_t uSrcReg, uint32_t *pEFlags));
3176IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u16, (uint16_t *pu16Dst, uint16_t *puAx, uint16_t uSrcReg, uint32_t *pEFlags));
3177IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u16_locked,(uint16_t *pu16Dst, uint16_t *puAx, uint16_t uSrcReg, uint32_t *pEFlags));
3178IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u32, (uint32_t *pu32Dst, uint32_t *puEax, uint32_t uSrcReg, uint32_t *pEFlags));
3179IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u32_locked,(uint32_t *pu32Dst, uint32_t *puEax, uint32_t uSrcReg, uint32_t *pEFlags));
3180#if ARCH_BITS == 32
3181IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64, (uint64_t *pu64Dst, uint64_t *puRax, uint64_t *puSrcReg, uint32_t *pEFlags));
3182IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64_locked,(uint64_t *pu64Dst, uint64_t *puRax, uint64_t *puSrcReg, uint32_t *pEFlags));
3183#else
3184IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64, (uint64_t *pu64Dst, uint64_t *puRax, uint64_t uSrcReg, uint32_t *pEFlags));
3185IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64_locked,(uint64_t *pu64Dst, uint64_t *puRax, uint64_t uSrcReg, uint32_t *pEFlags));
3186#endif
3187IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg8b,(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx,
3188 uint32_t *pEFlags));
3189IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg8b_locked,(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx,
3190 uint32_t *pEFlags));
3191IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx, PRTUINT128U pu128RbxRcx,
3192 uint32_t *pEFlags));
3193IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b_locked,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx, PRTUINT128U pu128RbxRcx,
3194 uint32_t *pEFlags));
3195#ifndef RT_ARCH_ARM64
3196IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b_fallback,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx,
3197 PRTUINT128U pu128RbxRcx, uint32_t *pEFlags));
3198#endif
3199/** @} */
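/* CMPXCHG8B sketch: EDX:EAX is the compare value and ECX:EBX the exchange
 * value, each passed as a packed RTUINT64U (low half = EAX/EBX, high half =
 * EDX/ECX, assuming the natural packing). Hypothetical invocation:
 * @code
 *  uint64_t  u64Mem = UINT64_C(0x1122334455667788);
 *  RTUINT64U uEaxEdx; uEaxEdx.u = u64Mem;                     // equal, so the exchange happens
 *  RTUINT64U uEbxEcx; uEbxEcx.u = UINT64_C(0xdeadbeefcafebabe);
 *  uint32_t  fEFlags = X86_EFL_1;
 *  iemAImpl_cmpxchg8b(&u64Mem, &uEaxEdx, &uEbxEcx, &fEFlags); // ZF set, u64Mem replaced
 * @endcode */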
3200
3201/** @name Memory ordering
3202 * @{ */
3203typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEMFENCE,(void));
3204typedef FNIEMAIMPLMEMFENCE *PFNIEMAIMPLMEMFENCE;
3205IEM_DECL_IMPL_DEF(void, iemAImpl_mfence,(void));
3206IEM_DECL_IMPL_DEF(void, iemAImpl_sfence,(void));
3207IEM_DECL_IMPL_DEF(void, iemAImpl_lfence,(void));
3208#ifndef RT_ARCH_ARM64
3209IEM_DECL_IMPL_DEF(void, iemAImpl_alt_mem_fence,(void));
3210#endif
3211/** @} */
3212
3213/** @name Double precision shifts
3214 * @{ */
3215typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU16,(uint16_t *pu16Dst, uint16_t u16Src, uint8_t cShift, uint32_t *pEFlags));
3216typedef FNIEMAIMPLSHIFTDBLU16 *PFNIEMAIMPLSHIFTDBLU16;
3217typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU32,(uint32_t *pu32Dst, uint32_t u32Src, uint8_t cShift, uint32_t *pEFlags));
3218typedef FNIEMAIMPLSHIFTDBLU32 *PFNIEMAIMPLSHIFTDBLU32;
3219typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU64,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t cShift, uint32_t *pEFlags));
3220typedef FNIEMAIMPLSHIFTDBLU64 *PFNIEMAIMPLSHIFTDBLU64;
3221FNIEMAIMPLSHIFTDBLU16 iemAImpl_shld_u16, iemAImpl_shld_u16_amd, iemAImpl_shld_u16_intel;
3222FNIEMAIMPLSHIFTDBLU32 iemAImpl_shld_u32, iemAImpl_shld_u32_amd, iemAImpl_shld_u32_intel;
3223FNIEMAIMPLSHIFTDBLU64 iemAImpl_shld_u64, iemAImpl_shld_u64_amd, iemAImpl_shld_u64_intel;
3224FNIEMAIMPLSHIFTDBLU16 iemAImpl_shrd_u16, iemAImpl_shrd_u16_amd, iemAImpl_shrd_u16_intel;
3225FNIEMAIMPLSHIFTDBLU32 iemAImpl_shrd_u32, iemAImpl_shrd_u32_amd, iemAImpl_shrd_u32_intel;
3226FNIEMAIMPLSHIFTDBLU64 iemAImpl_shrd_u64, iemAImpl_shrd_u64_amd, iemAImpl_shrd_u64_intel;
3227/** @} */
3228
3229
3230/** @name Bit search operations (thrown in with the binary ops).
3231 * @{ */
3232FNIEMAIMPLBINU16 iemAImpl_bsf_u16, iemAImpl_bsf_u16_amd, iemAImpl_bsf_u16_intel;
3233FNIEMAIMPLBINU32 iemAImpl_bsf_u32, iemAImpl_bsf_u32_amd, iemAImpl_bsf_u32_intel;
3234FNIEMAIMPLBINU64 iemAImpl_bsf_u64, iemAImpl_bsf_u64_amd, iemAImpl_bsf_u64_intel;
3235FNIEMAIMPLBINU16 iemAImpl_bsr_u16, iemAImpl_bsr_u16_amd, iemAImpl_bsr_u16_intel;
3236FNIEMAIMPLBINU32 iemAImpl_bsr_u32, iemAImpl_bsr_u32_amd, iemAImpl_bsr_u32_intel;
3237FNIEMAIMPLBINU64 iemAImpl_bsr_u64, iemAImpl_bsr_u64_amd, iemAImpl_bsr_u64_intel;
3238FNIEMAIMPLBINU16 iemAImpl_lzcnt_u16, iemAImpl_lzcnt_u16_amd, iemAImpl_lzcnt_u16_intel;
3239FNIEMAIMPLBINU32 iemAImpl_lzcnt_u32, iemAImpl_lzcnt_u32_amd, iemAImpl_lzcnt_u32_intel;
3240FNIEMAIMPLBINU64 iemAImpl_lzcnt_u64, iemAImpl_lzcnt_u64_amd, iemAImpl_lzcnt_u64_intel;
3241FNIEMAIMPLBINU16 iemAImpl_tzcnt_u16, iemAImpl_tzcnt_u16_amd, iemAImpl_tzcnt_u16_intel;
3242FNIEMAIMPLBINU32 iemAImpl_tzcnt_u32, iemAImpl_tzcnt_u32_amd, iemAImpl_tzcnt_u32_intel;
3243FNIEMAIMPLBINU64 iemAImpl_tzcnt_u64, iemAImpl_tzcnt_u64_amd, iemAImpl_tzcnt_u64_intel;
3244FNIEMAIMPLBINU16 iemAImpl_popcnt_u16, iemAImpl_popcnt_u16_fallback;
3245FNIEMAIMPLBINU32 iemAImpl_popcnt_u32, iemAImpl_popcnt_u32_fallback;
3246FNIEMAIMPLBINU64 iemAImpl_popcnt_u64, iemAImpl_popcnt_u64_fallback;
3247/** @} */
3248
3249/** @name Signed multiplication operations (thrown in with the binary ops).
3250 * @{ */
3251FNIEMAIMPLBINU16 iemAImpl_imul_two_u16, iemAImpl_imul_two_u16_amd, iemAImpl_imul_two_u16_intel;
3252FNIEMAIMPLBINU32 iemAImpl_imul_two_u32, iemAImpl_imul_two_u32_amd, iemAImpl_imul_two_u32_intel;
3253FNIEMAIMPLBINU64 iemAImpl_imul_two_u64, iemAImpl_imul_two_u64_amd, iemAImpl_imul_two_u64_intel;
3254/** @} */
3255
3256/** @name Arithmetic assignment operations on bytes (unary).
3257 * @{ */
3258typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU8, (uint8_t *pu8Dst, uint32_t *pEFlags));
3259typedef FNIEMAIMPLUNARYU8 *PFNIEMAIMPLUNARYU8;
3260FNIEMAIMPLUNARYU8 iemAImpl_inc_u8, iemAImpl_inc_u8_locked;
3261FNIEMAIMPLUNARYU8 iemAImpl_dec_u8, iemAImpl_dec_u8_locked;
3262FNIEMAIMPLUNARYU8 iemAImpl_not_u8, iemAImpl_not_u8_locked;
3263FNIEMAIMPLUNARYU8 iemAImpl_neg_u8, iemAImpl_neg_u8_locked;
3264/** @} */
3265
3266/** @name Arithmetic assignment operations on words (unary).
3267 * @{ */
3268typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU16, (uint16_t *pu16Dst, uint32_t *pEFlags));
3269typedef FNIEMAIMPLUNARYU16 *PFNIEMAIMPLUNARYU16;
3270FNIEMAIMPLUNARYU16 iemAImpl_inc_u16, iemAImpl_inc_u16_locked;
3271FNIEMAIMPLUNARYU16 iemAImpl_dec_u16, iemAImpl_dec_u16_locked;
3272FNIEMAIMPLUNARYU16 iemAImpl_not_u16, iemAImpl_not_u16_locked;
3273FNIEMAIMPLUNARYU16 iemAImpl_neg_u16, iemAImpl_neg_u16_locked;
3274/** @} */
3275
3276/** @name Arithmetic assignment operations on double words (unary).
3277 * @{ */
3278typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU32, (uint32_t *pu32Dst, uint32_t *pEFlags));
3279typedef FNIEMAIMPLUNARYU32 *PFNIEMAIMPLUNARYU32;
3280FNIEMAIMPLUNARYU32 iemAImpl_inc_u32, iemAImpl_inc_u32_locked;
3281FNIEMAIMPLUNARYU32 iemAImpl_dec_u32, iemAImpl_dec_u32_locked;
3282FNIEMAIMPLUNARYU32 iemAImpl_not_u32, iemAImpl_not_u32_locked;
3283FNIEMAIMPLUNARYU32 iemAImpl_neg_u32, iemAImpl_neg_u32_locked;
3284/** @} */
3285
3286/** @name Arithmetic assignment operations on quad words (unary).
3287 * @{ */
3288typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU64, (uint64_t *pu64Dst, uint32_t *pEFlags));
3289typedef FNIEMAIMPLUNARYU64 *PFNIEMAIMPLUNARYU64;
3290FNIEMAIMPLUNARYU64 iemAImpl_inc_u64, iemAImpl_inc_u64_locked;
3291FNIEMAIMPLUNARYU64 iemAImpl_dec_u64, iemAImpl_dec_u64_locked;
3292FNIEMAIMPLUNARYU64 iemAImpl_not_u64, iemAImpl_not_u64_locked;
3293FNIEMAIMPLUNARYU64 iemAImpl_neg_u64, iemAImpl_neg_u64_locked;
3294/** @} */
3295
3296
3297/** @name Shift operations on bytes (Group 2).
3298 * @{ */
3299typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU8,(uint32_t fEFlagsIn, uint8_t *pu8Dst, uint8_t cShift));
3300typedef FNIEMAIMPLSHIFTU8 *PFNIEMAIMPLSHIFTU8;
3301FNIEMAIMPLSHIFTU8 iemAImpl_rol_u8, iemAImpl_rol_u8_amd, iemAImpl_rol_u8_intel;
3302FNIEMAIMPLSHIFTU8 iemAImpl_ror_u8, iemAImpl_ror_u8_amd, iemAImpl_ror_u8_intel;
3303FNIEMAIMPLSHIFTU8 iemAImpl_rcl_u8, iemAImpl_rcl_u8_amd, iemAImpl_rcl_u8_intel;
3304FNIEMAIMPLSHIFTU8 iemAImpl_rcr_u8, iemAImpl_rcr_u8_amd, iemAImpl_rcr_u8_intel;
3305FNIEMAIMPLSHIFTU8 iemAImpl_shl_u8, iemAImpl_shl_u8_amd, iemAImpl_shl_u8_intel;
3306FNIEMAIMPLSHIFTU8 iemAImpl_shr_u8, iemAImpl_shr_u8_amd, iemAImpl_shr_u8_intel;
3307FNIEMAIMPLSHIFTU8 iemAImpl_sar_u8, iemAImpl_sar_u8_amd, iemAImpl_sar_u8_intel;
3308/** @} */
3309
3310/** @name Shift operations on words (Group 2).
3311 * @{ */
3312typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU16,(uint32_t fEFlagsIn, uint16_t *pu16Dst, uint8_t cShift));
3313typedef FNIEMAIMPLSHIFTU16 *PFNIEMAIMPLSHIFTU16;
3314FNIEMAIMPLSHIFTU16 iemAImpl_rol_u16, iemAImpl_rol_u16_amd, iemAImpl_rol_u16_intel;
3315FNIEMAIMPLSHIFTU16 iemAImpl_ror_u16, iemAImpl_ror_u16_amd, iemAImpl_ror_u16_intel;
3316FNIEMAIMPLSHIFTU16 iemAImpl_rcl_u16, iemAImpl_rcl_u16_amd, iemAImpl_rcl_u16_intel;
3317FNIEMAIMPLSHIFTU16 iemAImpl_rcr_u16, iemAImpl_rcr_u16_amd, iemAImpl_rcr_u16_intel;
3318FNIEMAIMPLSHIFTU16 iemAImpl_shl_u16, iemAImpl_shl_u16_amd, iemAImpl_shl_u16_intel;
3319FNIEMAIMPLSHIFTU16 iemAImpl_shr_u16, iemAImpl_shr_u16_amd, iemAImpl_shr_u16_intel;
3320FNIEMAIMPLSHIFTU16 iemAImpl_sar_u16, iemAImpl_sar_u16_amd, iemAImpl_sar_u16_intel;
3321/** @} */
3322
3323/** @name Shift operations on double words (Group 2).
3324 * @{ */
3325typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU32,(uint32_t fEFlagsIn, uint32_t *pu32Dst, uint8_t cShift));
3326typedef FNIEMAIMPLSHIFTU32 *PFNIEMAIMPLSHIFTU32;
3327FNIEMAIMPLSHIFTU32 iemAImpl_rol_u32, iemAImpl_rol_u32_amd, iemAImpl_rol_u32_intel;
3328FNIEMAIMPLSHIFTU32 iemAImpl_ror_u32, iemAImpl_ror_u32_amd, iemAImpl_ror_u32_intel;
3329FNIEMAIMPLSHIFTU32 iemAImpl_rcl_u32, iemAImpl_rcl_u32_amd, iemAImpl_rcl_u32_intel;
3330FNIEMAIMPLSHIFTU32 iemAImpl_rcr_u32, iemAImpl_rcr_u32_amd, iemAImpl_rcr_u32_intel;
3331FNIEMAIMPLSHIFTU32 iemAImpl_shl_u32, iemAImpl_shl_u32_amd, iemAImpl_shl_u32_intel;
3332FNIEMAIMPLSHIFTU32 iemAImpl_shr_u32, iemAImpl_shr_u32_amd, iemAImpl_shr_u32_intel;
3333FNIEMAIMPLSHIFTU32 iemAImpl_sar_u32, iemAImpl_sar_u32_amd, iemAImpl_sar_u32_intel;
3334/** @} */
3335
3336/** @name Shift operations on quad words (Group 2).
3337 * @{ */
3338typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU64,(uint32_t fEFlagsIn, uint64_t *pu64Dst, uint8_t cShift));
3339typedef FNIEMAIMPLSHIFTU64 *PFNIEMAIMPLSHIFTU64;
3340FNIEMAIMPLSHIFTU64 iemAImpl_rol_u64, iemAImpl_rol_u64_amd, iemAImpl_rol_u64_intel;
3341FNIEMAIMPLSHIFTU64 iemAImpl_ror_u64, iemAImpl_ror_u64_amd, iemAImpl_ror_u64_intel;
3342FNIEMAIMPLSHIFTU64 iemAImpl_rcl_u64, iemAImpl_rcl_u64_amd, iemAImpl_rcl_u64_intel;
3343FNIEMAIMPLSHIFTU64 iemAImpl_rcr_u64, iemAImpl_rcr_u64_amd, iemAImpl_rcr_u64_intel;
3344FNIEMAIMPLSHIFTU64 iemAImpl_shl_u64, iemAImpl_shl_u64_amd, iemAImpl_shl_u64_intel;
3345FNIEMAIMPLSHIFTU64 iemAImpl_shr_u64, iemAImpl_shr_u64_amd, iemAImpl_shr_u64_intel;
3346FNIEMAIMPLSHIFTU64 iemAImpl_sar_u64, iemAImpl_sar_u64_amd, iemAImpl_sar_u64_intel;
3347/** @} */
3348
3349/** @name Multiplication and division operations.
3350 * @{ */
3351typedef IEM_DECL_IMPL_TYPE(int, FNIEMAIMPLMULDIVU8,(uint16_t *pu16AX, uint8_t u8FactorDivisor, uint32_t *pEFlags));
3352typedef FNIEMAIMPLMULDIVU8 *PFNIEMAIMPLMULDIVU8;
3353FNIEMAIMPLMULDIVU8 iemAImpl_mul_u8, iemAImpl_mul_u8_amd, iemAImpl_mul_u8_intel;
3354FNIEMAIMPLMULDIVU8 iemAImpl_imul_u8, iemAImpl_imul_u8_amd, iemAImpl_imul_u8_intel;
3355FNIEMAIMPLMULDIVU8 iemAImpl_div_u8, iemAImpl_div_u8_amd, iemAImpl_div_u8_intel;
3356FNIEMAIMPLMULDIVU8 iemAImpl_idiv_u8, iemAImpl_idiv_u8_amd, iemAImpl_idiv_u8_intel;
3357
3358typedef IEM_DECL_IMPL_TYPE(int, FNIEMAIMPLMULDIVU16,(uint16_t *pu16AX, uint16_t *pu16DX, uint16_t u16FactorDivisor, uint32_t *pEFlags));
3359typedef FNIEMAIMPLMULDIVU16 *PFNIEMAIMPLMULDIVU16;
3360FNIEMAIMPLMULDIVU16 iemAImpl_mul_u16, iemAImpl_mul_u16_amd, iemAImpl_mul_u16_intel;
3361FNIEMAIMPLMULDIVU16 iemAImpl_imul_u16, iemAImpl_imul_u16_amd, iemAImpl_imul_u16_intel;
3362FNIEMAIMPLMULDIVU16 iemAImpl_div_u16, iemAImpl_div_u16_amd, iemAImpl_div_u16_intel;
3363FNIEMAIMPLMULDIVU16 iemAImpl_idiv_u16, iemAImpl_idiv_u16_amd, iemAImpl_idiv_u16_intel;
3364
3365typedef IEM_DECL_IMPL_TYPE(int, FNIEMAIMPLMULDIVU32,(uint32_t *pu32EAX, uint32_t *pu32EDX, uint32_t u32FactorDivisor, uint32_t *pEFlags));
3366typedef FNIEMAIMPLMULDIVU32 *PFNIEMAIMPLMULDIVU32;
3367FNIEMAIMPLMULDIVU32 iemAImpl_mul_u32, iemAImpl_mul_u32_amd, iemAImpl_mul_u32_intel;
3368FNIEMAIMPLMULDIVU32 iemAImpl_imul_u32, iemAImpl_imul_u32_amd, iemAImpl_imul_u32_intel;
3369FNIEMAIMPLMULDIVU32 iemAImpl_div_u32, iemAImpl_div_u32_amd, iemAImpl_div_u32_intel;
3370FNIEMAIMPLMULDIVU32 iemAImpl_idiv_u32, iemAImpl_idiv_u32_amd, iemAImpl_idiv_u32_intel;
3371
3372typedef IEM_DECL_IMPL_TYPE(int, FNIEMAIMPLMULDIVU64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64FactorDivisor, uint32_t *pEFlags));
3373typedef FNIEMAIMPLMULDIVU64 *PFNIEMAIMPLMULDIVU64;
3374FNIEMAIMPLMULDIVU64 iemAImpl_mul_u64, iemAImpl_mul_u64_amd, iemAImpl_mul_u64_intel;
3375FNIEMAIMPLMULDIVU64 iemAImpl_imul_u64, iemAImpl_imul_u64_amd, iemAImpl_imul_u64_intel;
3376FNIEMAIMPLMULDIVU64 iemAImpl_div_u64, iemAImpl_div_u64_amd, iemAImpl_div_u64_intel;
3377FNIEMAIMPLMULDIVU64 iemAImpl_idiv_u64, iemAImpl_idiv_u64_amd, iemAImpl_idiv_u64_intel;
3378/** @} */
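/* Division sketch: the dividend is passed as the (E/R)DX:(E/R)AX register
 * pair by reference, and the worker returns zero on success or non-zero when
 * \#DE must be raised (divide by zero or quotient overflow). Hypothetical
 * invocation:
 * @code
 *  uint32_t uEax = 100, uEdx = 0, fEFlags = X86_EFL_1;
 *  int rc = iemAImpl_div_u32(&uEax, &uEdx, 7, &fEFlags); // rc == 0; quotient 14 in uEax, remainder 2 in uEdx
 * @endcode */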
3379
3380/** @name Byte Swap.
3381 * @{ */
3382IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u16,(uint32_t *pu32Dst)); /* Yes, 32-bit register access. */
3383IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u32,(uint32_t *pu32Dst));
3384IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u64,(uint64_t *pu64Dst));
3385/** @} */
3386
3387/** @name Misc.
3388 * @{ */
3389FNIEMAIMPLBINU16 iemAImpl_arpl;
3390/** @} */
3391
3392/** @name RDRAND and RDSEED
3393 * @{ */
3394typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU16,(uint16_t *puDst, uint32_t *pEFlags));
3395typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU32,(uint32_t *puDst, uint32_t *pEFlags));
3396typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU64,(uint64_t *puDst, uint32_t *pEFlags));
3397typedef FNIEMAIMPLRDRANDSEEDU16 *PFNIEMAIMPLRDRANDSEEDU16;
3398typedef FNIEMAIMPLRDRANDSEEDU32 *PFNIEMAIMPLRDRANDSEEDU32;
3399typedef FNIEMAIMPLRDRANDSEEDU64 *PFNIEMAIMPLRDRANDSEEDU64;
3400
3401FNIEMAIMPLRDRANDSEEDU16 iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback;
3402FNIEMAIMPLRDRANDSEEDU32 iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback;
3403FNIEMAIMPLRDRANDSEEDU64 iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback;
3404FNIEMAIMPLRDRANDSEEDU16 iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback;
3405FNIEMAIMPLRDRANDSEEDU32 iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback;
3406FNIEMAIMPLRDRANDSEEDU64 iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback;
3407/** @} */
3408
3409/** @name ADOX and ADCX
3410 * @{ */
3411FNIEMAIMPLBINU32 iemAImpl_adcx_u32, iemAImpl_adcx_u32_fallback;
3412FNIEMAIMPLBINU64 iemAImpl_adcx_u64, iemAImpl_adcx_u64_fallback;
3413FNIEMAIMPLBINU32 iemAImpl_adox_u32, iemAImpl_adox_u32_fallback;
3414FNIEMAIMPLBINU64 iemAImpl_adox_u64, iemAImpl_adox_u64_fallback;
3415/** @} */
3416
3417/** @name FPU operations taking a 32-bit float argument
3418 * @{ */
3419typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR32FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
3420 PCRTFLOAT80U pr80Val1, PCRTFLOAT32U pr32Val2));
3421typedef FNIEMAIMPLFPUR32FSW *PFNIEMAIMPLFPUR32FSW;
3422
3423typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
3424 PCRTFLOAT80U pr80Val1, PCRTFLOAT32U pr32Val2));
3425typedef FNIEMAIMPLFPUR32 *PFNIEMAIMPLFPUR32;
3426
3427FNIEMAIMPLFPUR32FSW iemAImpl_fcom_r80_by_r32;
3428FNIEMAIMPLFPUR32 iemAImpl_fadd_r80_by_r32;
3429FNIEMAIMPLFPUR32 iemAImpl_fmul_r80_by_r32;
3430FNIEMAIMPLFPUR32 iemAImpl_fsub_r80_by_r32;
3431FNIEMAIMPLFPUR32 iemAImpl_fsubr_r80_by_r32;
3432FNIEMAIMPLFPUR32 iemAImpl_fdiv_r80_by_r32;
3433FNIEMAIMPLFPUR32 iemAImpl_fdivr_r80_by_r32;
3434
3435IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT32U pr32Val));
3436IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r32,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
3437 PRTFLOAT32U pr32Val, PCRTFLOAT80U pr80Val));
3438/** @} */
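/* FPU worker sketch: rounding and precision control come from the FCW in the
 * passed FPU state, while the result value and FSW are returned via the
 * IEMFPURESULT structure. Hypothetical, minimally initialized invocation:
 * @code
 *  X86FXSTATE   FpuState;
 *  RT_ZERO(FpuState);
 *  FpuState.FCW = 0x037f;                       // FINIT value: all exceptions masked
 *  RTFLOAT80U   r80One;
 *  r80One.s.fSign     = 0;
 *  r80One.s.uExponent = 0x3fff;
 *  r80One.s.uMantissa = RT_BIT_64(63);          // 1.0
 *  RTFLOAT32U   r32Two; r32Two.u = 0x40000000;  // 2.0f
 *  IEMFPURESULT Res;
 *  iemAImpl_fadd_r80_by_r32(&FpuState, &Res, &r80One, &r32Two); // Res.r80Result == 3.0, Res.FSW updated
 * @endcode */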
3439
3440/** @name FPU operations taking a 64-bit float argument
3441 * @{ */
3442typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR64FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
3443 PCRTFLOAT80U pr80Val1, PCRTFLOAT64U pr64Val2));
3444typedef FNIEMAIMPLFPUR64FSW *PFNIEMAIMPLFPUR64FSW;
3445
3446typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
3447 PCRTFLOAT80U pr80Val1, PCRTFLOAT64U pr64Val2));
3448typedef FNIEMAIMPLFPUR64 *PFNIEMAIMPLFPUR64;
3449
3450FNIEMAIMPLFPUR64FSW iemAImpl_fcom_r80_by_r64;
3451FNIEMAIMPLFPUR64 iemAImpl_fadd_r80_by_r64;
3452FNIEMAIMPLFPUR64 iemAImpl_fmul_r80_by_r64;
3453FNIEMAIMPLFPUR64 iemAImpl_fsub_r80_by_r64;
3454FNIEMAIMPLFPUR64 iemAImpl_fsubr_r80_by_r64;
3455FNIEMAIMPLFPUR64 iemAImpl_fdiv_r80_by_r64;
3456FNIEMAIMPLFPUR64 iemAImpl_fdivr_r80_by_r64;
3457
3458IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT64U pr64Val));
3459IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r64,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
3460 PRTFLOAT64U pr64Val, PCRTFLOAT80U pr80Val));
3461/** @} */
3462
3463/** @name FPU operations taking an 80-bit float argument
3464 * @{ */
3465typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
3466 PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
3467typedef FNIEMAIMPLFPUR80 *PFNIEMAIMPLFPUR80;
3468FNIEMAIMPLFPUR80 iemAImpl_fadd_r80_by_r80;
3469FNIEMAIMPLFPUR80 iemAImpl_fmul_r80_by_r80;
3470FNIEMAIMPLFPUR80 iemAImpl_fsub_r80_by_r80;
3471FNIEMAIMPLFPUR80 iemAImpl_fsubr_r80_by_r80;
3472FNIEMAIMPLFPUR80 iemAImpl_fdiv_r80_by_r80;
3473FNIEMAIMPLFPUR80 iemAImpl_fdivr_r80_by_r80;
3474FNIEMAIMPLFPUR80 iemAImpl_fprem_r80_by_r80;
3475FNIEMAIMPLFPUR80 iemAImpl_fprem1_r80_by_r80;
3476FNIEMAIMPLFPUR80 iemAImpl_fscale_r80_by_r80;
3477
3478FNIEMAIMPLFPUR80 iemAImpl_fpatan_r80_by_r80, iemAImpl_fpatan_r80_by_r80_amd, iemAImpl_fpatan_r80_by_r80_intel;
3479FNIEMAIMPLFPUR80 iemAImpl_fyl2x_r80_by_r80, iemAImpl_fyl2x_r80_by_r80_amd, iemAImpl_fyl2x_r80_by_r80_intel;
3480FNIEMAIMPLFPUR80 iemAImpl_fyl2xp1_r80_by_r80, iemAImpl_fyl2xp1_r80_by_r80_amd, iemAImpl_fyl2xp1_r80_by_r80_intel;
3481
3482typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
3483 PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
3484typedef FNIEMAIMPLFPUR80FSW *PFNIEMAIMPLFPUR80FSW;
3485FNIEMAIMPLFPUR80FSW iemAImpl_fcom_r80_by_r80;
3486FNIEMAIMPLFPUR80FSW iemAImpl_fucom_r80_by_r80;
3487
3488typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPUR80EFL,(PCX86FXSTATE pFpuState, uint16_t *pu16Fsw,
3489 PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
3490typedef FNIEMAIMPLFPUR80EFL *PFNIEMAIMPLFPUR80EFL;
3491FNIEMAIMPLFPUR80EFL iemAImpl_fcomi_r80_by_r80;
3492FNIEMAIMPLFPUR80EFL iemAImpl_fucomi_r80_by_r80;
3493
3494typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARY,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT80U pr80Val));
3495typedef FNIEMAIMPLFPUR80UNARY *PFNIEMAIMPLFPUR80UNARY;
3496FNIEMAIMPLFPUR80UNARY iemAImpl_fabs_r80;
3497FNIEMAIMPLFPUR80UNARY iemAImpl_fchs_r80;
3498FNIEMAIMPLFPUR80UNARY iemAImpl_f2xm1_r80, iemAImpl_f2xm1_r80_amd, iemAImpl_f2xm1_r80_intel;
3499FNIEMAIMPLFPUR80UNARY iemAImpl_fsqrt_r80;
3500FNIEMAIMPLFPUR80UNARY iemAImpl_frndint_r80;
3501FNIEMAIMPLFPUR80UNARY iemAImpl_fsin_r80, iemAImpl_fsin_r80_amd, iemAImpl_fsin_r80_intel;
3502FNIEMAIMPLFPUR80UNARY iemAImpl_fcos_r80, iemAImpl_fcos_r80_amd, iemAImpl_fcos_r80_intel;
3503
3504typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARYFSW,(PCX86FXSTATE pFpuState, uint16_t *pu16Fsw, PCRTFLOAT80U pr80Val));
3505typedef FNIEMAIMPLFPUR80UNARYFSW *PFNIEMAIMPLFPUR80UNARYFSW;
3506FNIEMAIMPLFPUR80UNARYFSW iemAImpl_ftst_r80;
3507FNIEMAIMPLFPUR80UNARYFSW iemAImpl_fxam_r80;
3508
3509typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80LDCONST,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes));
3510typedef FNIEMAIMPLFPUR80LDCONST *PFNIEMAIMPLFPUR80LDCONST;
3511FNIEMAIMPLFPUR80LDCONST iemAImpl_fld1;
3512FNIEMAIMPLFPUR80LDCONST iemAImpl_fldl2t;
3513FNIEMAIMPLFPUR80LDCONST iemAImpl_fldl2e;
3514FNIEMAIMPLFPUR80LDCONST iemAImpl_fldpi;
3515FNIEMAIMPLFPUR80LDCONST iemAImpl_fldlg2;
3516FNIEMAIMPLFPUR80LDCONST iemAImpl_fldln2;
3517FNIEMAIMPLFPUR80LDCONST iemAImpl_fldz;
3518
3519typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARYTWO,(PCX86FXSTATE pFpuState, PIEMFPURESULTTWO pFpuResTwo,
3520 PCRTFLOAT80U pr80Val));
3521typedef FNIEMAIMPLFPUR80UNARYTWO *PFNIEMAIMPLFPUR80UNARYTWO;
3522FNIEMAIMPLFPUR80UNARYTWO iemAImpl_fptan_r80_r80, iemAImpl_fptan_r80_r80_amd, iemAImpl_fptan_r80_r80_intel;
3523FNIEMAIMPLFPUR80UNARYTWO iemAImpl_fxtract_r80_r80;
3524FNIEMAIMPLFPUR80UNARYTWO iemAImpl_fsincos_r80_r80, iemAImpl_fsincos_r80_r80_amd, iemAImpl_fsincos_r80_r80_intel;
3525
3526IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT80U pr80Val));
3527IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r80,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
3528 PRTFLOAT80U pr80Dst, PCRTFLOAT80U pr80Src));
3529
3530IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_d80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTPBCD80U pd80Val));
3531IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_d80,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
3532 PRTPBCD80U pd80Dst, PCRTFLOAT80U pr80Src));
3533
3534/** @} */
3535
3536/** @name FPU operations taking a 16-bit signed integer argument
3537 * @{ */
3538typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI16,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
3539 PCRTFLOAT80U pr80Val1, int16_t const *pi16Val2));
3540typedef FNIEMAIMPLFPUI16 *PFNIEMAIMPLFPUI16;
3541typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI16,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
3542 int16_t *pi16Dst, PCRTFLOAT80U pr80Src));
3543typedef FNIEMAIMPLFPUSTR80TOI16 *PFNIEMAIMPLFPUSTR80TOI16;
3544
3545FNIEMAIMPLFPUI16 iemAImpl_fiadd_r80_by_i16;
3546FNIEMAIMPLFPUI16 iemAImpl_fimul_r80_by_i16;
3547FNIEMAIMPLFPUI16 iemAImpl_fisub_r80_by_i16;
3548FNIEMAIMPLFPUI16 iemAImpl_fisubr_r80_by_i16;
3549FNIEMAIMPLFPUI16 iemAImpl_fidiv_r80_by_i16;
3550FNIEMAIMPLFPUI16 iemAImpl_fidivr_r80_by_i16;
3551
3552typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI16FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
3553 PCRTFLOAT80U pr80Val1, int16_t const *pi16Val2));
3554typedef FNIEMAIMPLFPUI16FSW *PFNIEMAIMPLFPUI16FSW;
3555FNIEMAIMPLFPUI16FSW iemAImpl_ficom_r80_by_i16;
3556
3557IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i16,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int16_t const *pi16Val));
3558FNIEMAIMPLFPUSTR80TOI16 iemAImpl_fist_r80_to_i16;
3559FNIEMAIMPLFPUSTR80TOI16 iemAImpl_fistt_r80_to_i16, iemAImpl_fistt_r80_to_i16_amd, iemAImpl_fistt_r80_to_i16_intel;
3560/** @} */
3561
3562/** @name FPU operations taking a 32-bit signed integer argument
3563 * @{ */
3564typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
3565 PCRTFLOAT80U pr80Val1, int32_t const *pi32Val2));
3566typedef FNIEMAIMPLFPUI32 *PFNIEMAIMPLFPUI32;
3567typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI32,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
3568 int32_t *pi32Dst, PCRTFLOAT80U pr80Src));
3569typedef FNIEMAIMPLFPUSTR80TOI32 *PFNIEMAIMPLFPUSTR80TOI32;
3570
3571FNIEMAIMPLFPUI32 iemAImpl_fiadd_r80_by_i32;
3572FNIEMAIMPLFPUI32 iemAImpl_fimul_r80_by_i32;
3573FNIEMAIMPLFPUI32 iemAImpl_fisub_r80_by_i32;
3574FNIEMAIMPLFPUI32 iemAImpl_fisubr_r80_by_i32;
3575FNIEMAIMPLFPUI32 iemAImpl_fidiv_r80_by_i32;
3576FNIEMAIMPLFPUI32 iemAImpl_fidivr_r80_by_i32;
3577
3578typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI32FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
3579 PCRTFLOAT80U pr80Val1, int32_t const *pi32Val2));
3580typedef FNIEMAIMPLFPUI32FSW *PFNIEMAIMPLFPUI32FSW;
3581FNIEMAIMPLFPUI32FSW iemAImpl_ficom_r80_by_i32;
3582
3583IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int32_t const *pi32Val));
3584FNIEMAIMPLFPUSTR80TOI32 iemAImpl_fist_r80_to_i32;
3585FNIEMAIMPLFPUSTR80TOI32 iemAImpl_fistt_r80_to_i32;
3586/** @} */
3587
3588/** @name FPU operations taking a 64-bit signed integer argument
3589 * @{ */
3590typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI64,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
3591 int64_t *pi64Dst, PCRTFLOAT80U pr80Src));
3592typedef FNIEMAIMPLFPUSTR80TOI64 *PFNIEMAIMPLFPUSTR80TOI64;
3593
3594IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int64_t const *pi64Val));
3595FNIEMAIMPLFPUSTR80TOI64 iemAImpl_fist_r80_to_i64;
3596FNIEMAIMPLFPUSTR80TOI64 iemAImpl_fistt_r80_to_i64;
3597/** @} */
3598
3599
3600/** Temporary type representing a 256-bit vector register. */
3601typedef struct { uint64_t au64[4]; } IEMVMM256;
3602/** Temporary type pointing to a 256-bit vector register. */
3603typedef IEMVMM256 *PIEMVMM256;
3604/** Temporary type pointing to a const 256-bit vector register. */
3605typedef IEMVMM256 const *PCIEMVMM256;
3606
3607
3608/** @name Media (SSE/MMX/AVX) operations: full1 + full2 -> full1.
3609 * @{ */
3610typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAF2U64,(PCX86FXSTATE pFpuState, uint64_t *puDst, uint64_t const *puSrc));
3611typedef FNIEMAIMPLMEDIAF2U64 *PFNIEMAIMPLMEDIAF2U64;
3612typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
3613typedef FNIEMAIMPLMEDIAF2U128 *PFNIEMAIMPLMEDIAF2U128;
3614typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U256,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc));
3615typedef FNIEMAIMPLMEDIAF2U256 *PFNIEMAIMPLMEDIAF2U256;
3616typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3U128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
3617typedef FNIEMAIMPLMEDIAF3U128 *PFNIEMAIMPLMEDIAF3U128;
3618typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3U256,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc1, PCX86YMMREG puSrc2));
3619typedef FNIEMAIMPLMEDIAF3U256 *PFNIEMAIMPLMEDIAF3U256;
3620typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U64,(uint64_t *puDst, uint64_t const *puSrc));
3621typedef FNIEMAIMPLMEDIAOPTF2U64 *PFNIEMAIMPLMEDIAOPTF2U64;
3622typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U128,(PRTUINT128U puDst, PCRTUINT128U puSrc));
3623typedef FNIEMAIMPLMEDIAOPTF2U128 *PFNIEMAIMPLMEDIAOPTF2U128;
3624typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2));
3625typedef FNIEMAIMPLMEDIAOPTF3U128 *PFNIEMAIMPLMEDIAOPTF3U128;
3626typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2));
3627typedef FNIEMAIMPLMEDIAOPTF3U256 *PFNIEMAIMPLMEDIAOPTF3U256;
3628typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U256,(PRTUINT256U puDst, PCRTUINT256U puSrc));
3629typedef FNIEMAIMPLMEDIAOPTF2U256 *PFNIEMAIMPLMEDIAOPTF2U256;
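/* SIMD floating point workers of the FNIEMAIMPLMEDIAF2/F3 types take the guest
 * MXCSR by value and return the updated MXCSR (with any newly raised exception
 * flags), matching the save/restore of the host floating point control
 * register around recompiled code. Sketch of a hypothetical caller:
 * @code
 *  static uint32_t iemExampleCallMediaF3(PFNIEMAIMPLMEDIAF3U128 pfnWorker, uint32_t uMxCsrGuest,
 *                                        PX86XMMREG puDst, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2)
 *  {
 *      return pfnWorker(uMxCsrGuest, puDst, puSrc1, puSrc2); // returns updated MXCSR
 *  }
 * @endcode */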
3630FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pshufb_u64, iemAImpl_pshufb_u64_fallback;
3631FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pand_u64, iemAImpl_pandn_u64, iemAImpl_por_u64, iemAImpl_pxor_u64;
3632FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pcmpeqb_u64, iemAImpl_pcmpeqw_u64, iemAImpl_pcmpeqd_u64;
3633FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pcmpgtb_u64, iemAImpl_pcmpgtw_u64, iemAImpl_pcmpgtd_u64;
3634FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddb_u64, iemAImpl_paddsb_u64, iemAImpl_paddusb_u64;
3635FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddw_u64, iemAImpl_paddsw_u64, iemAImpl_paddusw_u64;
3636FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddd_u64;
3637FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_paddq_u64;
3638FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubb_u64, iemAImpl_psubsb_u64, iemAImpl_psubusb_u64;
3639FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubw_u64, iemAImpl_psubsw_u64, iemAImpl_psubusw_u64;
3640FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubd_u64;
3641FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psubq_u64;
3642FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmaddwd_u64, iemAImpl_pmaddwd_u64_fallback;
3643FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmullw_u64, iemAImpl_pmulhw_u64;
3644FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pminub_u64, iemAImpl_pmaxub_u64;
3645FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pminsw_u64, iemAImpl_pmaxsw_u64;
3646FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pabsb_u64, iemAImpl_pabsb_u64_fallback;
3647FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pabsw_u64, iemAImpl_pabsw_u64_fallback;
3648FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pabsd_u64, iemAImpl_pabsd_u64_fallback;
3649FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psignb_u64, iemAImpl_psignb_u64_fallback;
3650FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psignw_u64, iemAImpl_psignw_u64_fallback;
3651FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psignd_u64, iemAImpl_psignd_u64_fallback;
3652FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phaddw_u64, iemAImpl_phaddw_u64_fallback;
3653FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phaddd_u64, iemAImpl_phaddd_u64_fallback;
3654FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phsubw_u64, iemAImpl_phsubw_u64_fallback;
3655FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phsubd_u64, iemAImpl_phsubd_u64_fallback;
3656FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phaddsw_u64, iemAImpl_phaddsw_u64_fallback;
3657FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_phsubsw_u64, iemAImpl_phsubsw_u64_fallback;
3658FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmaddubsw_u64, iemAImpl_pmaddubsw_u64_fallback;
3659FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmulhrsw_u64, iemAImpl_pmulhrsw_u64_fallback;
3660FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmuludq_u64;
3661FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psllw_u64, iemAImpl_psrlw_u64, iemAImpl_psraw_u64;
3662FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pslld_u64, iemAImpl_psrld_u64, iemAImpl_psrad_u64;
3663FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psllq_u64, iemAImpl_psrlq_u64;
3664FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_packsswb_u64, iemAImpl_packuswb_u64;
3665FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_packssdw_u64;
3666FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pmulhuw_u64;
3667FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_pavgb_u64, iemAImpl_pavgw_u64;
3668FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_psadbw_u64;
3669
3670FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback;
3671FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pand_u128, iemAImpl_pandn_u128, iemAImpl_por_u128, iemAImpl_pxor_u128;
3672FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpeqb_u128, iemAImpl_pcmpeqw_u128, iemAImpl_pcmpeqd_u128;
3673FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback;
3674FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpgtb_u128, iemAImpl_pcmpgtw_u128, iemAImpl_pcmpgtd_u128;
3675FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback;
3676FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddb_u128, iemAImpl_paddsb_u128, iemAImpl_paddusb_u128;
3677FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddw_u128, iemAImpl_paddsw_u128, iemAImpl_paddusw_u128;
3678FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddd_u128;
3679FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddq_u128;
3680FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubb_u128, iemAImpl_psubsb_u128, iemAImpl_psubusb_u128;
3681FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubw_u128, iemAImpl_psubsw_u128, iemAImpl_psubusw_u128;
3682FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubd_u128;
3683FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubq_u128;
3684FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmullw_u128, iemAImpl_pmullw_u128_fallback;
3685FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhw_u128;
3686FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback;
3687FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaddwd_u128, iemAImpl_pmaddwd_u128_fallback;
3688FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminub_u128;
3689FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback;
3690FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback;
3691FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback;
3692FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback;
3693FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsw_u128, iemAImpl_pminsw_u128_fallback;
3694FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxub_u128;
3695FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback;
3696FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback;
3697FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback;
3698FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsw_u128;
3699FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback;
3700FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback;
3701FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback;
3702FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback;
3703FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback;
3704FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback;
3705FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback;
3706FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddw_u128, iemAImpl_phaddw_u128_fallback;
3707FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddd_u128, iemAImpl_phaddd_u128_fallback;
3708FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubw_u128, iemAImpl_phsubw_u128_fallback;
3709FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubd_u128, iemAImpl_phsubd_u128_fallback;
3710FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddsw_u128, iemAImpl_phaddsw_u128_fallback;
3711FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubsw_u128, iemAImpl_phsubsw_u128_fallback;
3712FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaddubsw_u128, iemAImpl_pmaddubsw_u128_fallback;
3713FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhrsw_u128, iemAImpl_pmulhrsw_u128_fallback;
3714FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmuludq_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_packsswb_u128, iemAImpl_packuswb_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_packssdw_u128, iemAImpl_packusdw_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psllw_u128, iemAImpl_psrlw_u128, iemAImpl_psraw_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pslld_u128, iemAImpl_psrld_u128, iemAImpl_psrad_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psllq_u128, iemAImpl_psrlq_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhuw_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pavgb_u128, iemAImpl_pavgw_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psadbw_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmuldq_u128, iemAImpl_pmuldq_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_unpcklps_u128, iemAImpl_unpcklpd_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_unpckhps_u128, iemAImpl_unpckhpd_u128;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phminposuw_u128, iemAImpl_phminposuw_u128_fallback;

FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpshufb_u128, iemAImpl_vpshufb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpand_u128, iemAImpl_vpand_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpandn_u128, iemAImpl_vpandn_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpor_u128, iemAImpl_vpor_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpxor_u128, iemAImpl_vpxor_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqb_u128, iemAImpl_vpcmpeqb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqw_u128, iemAImpl_vpcmpeqw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqd_u128, iemAImpl_vpcmpeqd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqq_u128, iemAImpl_vpcmpeqq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtb_u128, iemAImpl_vpcmpgtb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtw_u128, iemAImpl_vpcmpgtw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtd_u128, iemAImpl_vpcmpgtd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtq_u128, iemAImpl_vpcmpgtq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddb_u128, iemAImpl_vpaddb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddw_u128, iemAImpl_vpaddw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddd_u128, iemAImpl_vpaddd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddq_u128, iemAImpl_vpaddq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubb_u128, iemAImpl_vpsubb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubw_u128, iemAImpl_vpsubw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubd_u128, iemAImpl_vpsubd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubq_u128, iemAImpl_vpsubq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminub_u128, iemAImpl_vpminub_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminuw_u128, iemAImpl_vpminuw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminud_u128, iemAImpl_vpminud_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsb_u128, iemAImpl_vpminsb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsw_u128, iemAImpl_vpminsw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsd_u128, iemAImpl_vpminsd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxub_u128, iemAImpl_vpmaxub_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxuw_u128, iemAImpl_vpmaxuw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxud_u128, iemAImpl_vpmaxud_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsb_u128, iemAImpl_vpmaxsb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsw_u128, iemAImpl_vpmaxsw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsd_u128, iemAImpl_vpmaxsd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpacksswb_u128, iemAImpl_vpacksswb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackssdw_u128, iemAImpl_vpackssdw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackuswb_u128, iemAImpl_vpackuswb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackusdw_u128, iemAImpl_vpackusdw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmullw_u128, iemAImpl_vpmullw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulld_u128, iemAImpl_vpmulld_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhw_u128, iemAImpl_vpmulhw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhuw_u128, iemAImpl_vpmulhuw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpavgb_u128, iemAImpl_vpavgb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpavgw_u128, iemAImpl_vpavgw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignb_u128, iemAImpl_vpsignb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignw_u128, iemAImpl_vpsignw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignd_u128, iemAImpl_vpsignd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddw_u128, iemAImpl_vphaddw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddd_u128, iemAImpl_vphaddd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubw_u128, iemAImpl_vphsubw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubd_u128, iemAImpl_vphsubd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddsw_u128, iemAImpl_vphaddsw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubsw_u128, iemAImpl_vphsubsw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaddubsw_u128, iemAImpl_vpmaddubsw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhrsw_u128, iemAImpl_vpmulhrsw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsadbw_u128, iemAImpl_vpsadbw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmuldq_u128, iemAImpl_vpmuldq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmuludq_u128, iemAImpl_vpmuludq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubsb_u128, iemAImpl_vpsubsb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubsw_u128, iemAImpl_vpsubsw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubusb_u128, iemAImpl_vpsubusb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubusw_u128, iemAImpl_vpsubusw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddusb_u128, iemAImpl_vpaddusb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddusw_u128, iemAImpl_vpaddusw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddsb_u128, iemAImpl_vpaddsb_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddsw_u128, iemAImpl_vpaddsw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllw_u128, iemAImpl_vpsllw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpslld_u128, iemAImpl_vpslld_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllq_u128, iemAImpl_vpsllq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsraw_u128, iemAImpl_vpsraw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrad_u128, iemAImpl_vpsrad_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlw_u128, iemAImpl_vpsrlw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrld_u128, iemAImpl_vpsrld_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlq_u128, iemAImpl_vpsrlq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaddwd_u128, iemAImpl_vpmaddwd_u128_fallback;

FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsb_u128, iemAImpl_vpabsb_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsw_u128, iemAImpl_vpabsw_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsd_u128, iemAImpl_vpabsd_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback;

FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpshufb_u256, iemAImpl_vpshufb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpand_u256, iemAImpl_vpand_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpandn_u256, iemAImpl_vpandn_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpor_u256, iemAImpl_vpor_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpxor_u256, iemAImpl_vpxor_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqb_u256, iemAImpl_vpcmpeqb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqw_u256, iemAImpl_vpcmpeqw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqd_u256, iemAImpl_vpcmpeqd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqq_u256, iemAImpl_vpcmpeqq_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtb_u256, iemAImpl_vpcmpgtb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtw_u256, iemAImpl_vpcmpgtw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtd_u256, iemAImpl_vpcmpgtd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtq_u256, iemAImpl_vpcmpgtq_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddb_u256, iemAImpl_vpaddb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddw_u256, iemAImpl_vpaddw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddd_u256, iemAImpl_vpaddd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddq_u256, iemAImpl_vpaddq_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubb_u256, iemAImpl_vpsubb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubw_u256, iemAImpl_vpsubw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubd_u256, iemAImpl_vpsubd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubq_u256, iemAImpl_vpsubq_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminub_u256, iemAImpl_vpminub_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminuw_u256, iemAImpl_vpminuw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminud_u256, iemAImpl_vpminud_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsb_u256, iemAImpl_vpminsb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsw_u256, iemAImpl_vpminsw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsd_u256, iemAImpl_vpminsd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxub_u256, iemAImpl_vpmaxub_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxuw_u256, iemAImpl_vpmaxuw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxud_u256, iemAImpl_vpmaxud_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsb_u256, iemAImpl_vpmaxsb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsw_u256, iemAImpl_vpmaxsw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsd_u256, iemAImpl_vpmaxsd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpacksswb_u256, iemAImpl_vpacksswb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackssdw_u256, iemAImpl_vpackssdw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackuswb_u256, iemAImpl_vpackuswb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackusdw_u256, iemAImpl_vpackusdw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmullw_u256, iemAImpl_vpmullw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulld_u256, iemAImpl_vpmulld_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhw_u256, iemAImpl_vpmulhw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhuw_u256, iemAImpl_vpmulhuw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpavgb_u256, iemAImpl_vpavgb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpavgw_u256, iemAImpl_vpavgw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignb_u256, iemAImpl_vpsignb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignw_u256, iemAImpl_vpsignw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignd_u256, iemAImpl_vpsignd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddw_u256, iemAImpl_vphaddw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddd_u256, iemAImpl_vphaddd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubw_u256, iemAImpl_vphsubw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubd_u256, iemAImpl_vphsubd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddsw_u256, iemAImpl_vphaddsw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubsw_u256, iemAImpl_vphsubsw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaddubsw_u256, iemAImpl_vpmaddubsw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhrsw_u256, iemAImpl_vpmulhrsw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsadbw_u256, iemAImpl_vpsadbw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmuldq_u256, iemAImpl_vpmuldq_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmuludq_u256, iemAImpl_vpmuludq_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubsb_u256, iemAImpl_vpsubsb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubsw_u256, iemAImpl_vpsubsw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubusb_u256, iemAImpl_vpsubusb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubusw_u256, iemAImpl_vpsubusw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddusb_u256, iemAImpl_vpaddusb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddusw_u256, iemAImpl_vpaddusw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddsb_u256, iemAImpl_vpaddsb_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddsw_u256, iemAImpl_vpaddsw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllw_u256, iemAImpl_vpsllw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpslld_u256, iemAImpl_vpslld_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllq_u256, iemAImpl_vpsllq_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsraw_u256, iemAImpl_vpsraw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrad_u256, iemAImpl_vpsrad_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlw_u256, iemAImpl_vpsrlw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrld_u256, iemAImpl_vpsrld_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlq_u256, iemAImpl_vpsrlq_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaddwd_u256, iemAImpl_vpmaddwd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermps_u256, iemAImpl_vpermps_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermd_u256, iemAImpl_vpermd_u256_fallback;

FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsb_u256, iemAImpl_vpabsb_u256_fallback;
FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsw_u256, iemAImpl_vpabsw_u256_fallback;
FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsd_u256, iemAImpl_vpabsd_u256_fallback;
/** @} */
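
/*
 * Usage sketch (illustrative only, not part of the interface): each helper
 * above comes as a host/fallback pair, where the unsuffixed variant may be
 * assembly requiring the host CPU to implement the instruction and the
 * _fallback variant is portable C.  The feature flag below is a hypothetical
 * stand-in for whatever CPUID derived information the caller has at hand:
 *
 *      PFNIEMAIMPLMEDIAOPTF3U256 const pfnVPShufB = fHostHasAvx2 // assumed caller-provided flag
 *                                                 ? iemAImpl_vpshufb_u256
 *                                                 : iemAImpl_vpshufb_u256_fallback;
 *      RTUINT256U uDst, uSrc1, uSrc2;  // initialization omitted
 *      pfnVPShufB(&uDst, &uSrc1, &uSrc2);
 */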

/** @name Media (SSE/MMX/AVX) operations: lowhalf1 + lowhalf2 -> full1.
 * @{ */
FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_punpcklbw_u64, iemAImpl_punpcklwd_u64, iemAImpl_punpckldq_u64;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_punpcklbw_u128, iemAImpl_punpcklwd_u128, iemAImpl_punpckldq_u128, iemAImpl_punpcklqdq_u128;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpunpcklbw_u128, iemAImpl_vpunpcklbw_u128_fallback,
                         iemAImpl_vpunpcklwd_u128, iemAImpl_vpunpcklwd_u128_fallback,
                         iemAImpl_vpunpckldq_u128, iemAImpl_vpunpckldq_u128_fallback,
                         iemAImpl_vpunpcklqdq_u128, iemAImpl_vpunpcklqdq_u128_fallback,
                         iemAImpl_vunpcklps_u128, iemAImpl_vunpcklps_u128_fallback,
                         iemAImpl_vunpcklpd_u128, iemAImpl_vunpcklpd_u128_fallback,
                         iemAImpl_vunpckhps_u128, iemAImpl_vunpckhps_u128_fallback,
                         iemAImpl_vunpckhpd_u128, iemAImpl_vunpckhpd_u128_fallback;

FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpunpcklbw_u256, iemAImpl_vpunpcklbw_u256_fallback,
                         iemAImpl_vpunpcklwd_u256, iemAImpl_vpunpcklwd_u256_fallback,
                         iemAImpl_vpunpckldq_u256, iemAImpl_vpunpckldq_u256_fallback,
                         iemAImpl_vpunpcklqdq_u256, iemAImpl_vpunpcklqdq_u256_fallback,
                         iemAImpl_vunpcklps_u256, iemAImpl_vunpcklps_u256_fallback,
                         iemAImpl_vunpcklpd_u256, iemAImpl_vunpcklpd_u256_fallback,
                         iemAImpl_vunpckhps_u256, iemAImpl_vunpckhps_u256_fallback,
                         iemAImpl_vunpckhpd_u256, iemAImpl_vunpckhpd_u256_fallback;
/** @} */

/** @name Media (SSE/MMX/AVX) operations: hihalf1 + hihalf2 -> full1.
 * @{ */
FNIEMAIMPLMEDIAOPTF2U64 iemAImpl_punpckhbw_u64, iemAImpl_punpckhwd_u64, iemAImpl_punpckhdq_u64;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_punpckhbw_u128, iemAImpl_punpckhwd_u128, iemAImpl_punpckhdq_u128, iemAImpl_punpckhqdq_u128;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpunpckhbw_u128, iemAImpl_vpunpckhbw_u128_fallback,
                         iemAImpl_vpunpckhwd_u128, iemAImpl_vpunpckhwd_u128_fallback,
                         iemAImpl_vpunpckhdq_u128, iemAImpl_vpunpckhdq_u128_fallback,
                         iemAImpl_vpunpckhqdq_u128, iemAImpl_vpunpckhqdq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpunpckhbw_u256, iemAImpl_vpunpckhbw_u256_fallback,
                         iemAImpl_vpunpckhwd_u256, iemAImpl_vpunpckhwd_u256_fallback,
                         iemAImpl_vpunpckhdq_u256, iemAImpl_vpunpckhdq_u256_fallback,
                         iemAImpl_vpunpckhqdq_u256, iemAImpl_vpunpckhqdq_u256_fallback;
/** @} */

/** @name Media (SSE/MMX/AVX) operation: Packed Shuffle Stuff (evil)
 * @{ */
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUFU128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
typedef FNIEMAIMPLMEDIAPSHUFU128 *PFNIEMAIMPLMEDIAPSHUFU128;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUFU256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil));
typedef FNIEMAIMPLMEDIAPSHUFU256 *PFNIEMAIMPLMEDIAPSHUFU256;
IEM_DECL_IMPL_DEF(void, iemAImpl_pshufw_u64,(uint64_t *puDst, uint64_t const *puSrc, uint8_t bEvil));
FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_pshufhw_u128, iemAImpl_pshuflw_u128, iemAImpl_pshufd_u128;
#ifndef IEM_WITHOUT_ASSEMBLY
FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpshufhw_u256, iemAImpl_vpshuflw_u256, iemAImpl_vpshufd_u256;
#endif
FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpshufhw_u256_fallback, iemAImpl_vpshuflw_u256_fallback, iemAImpl_vpshufd_u256_fallback;
/** @} */
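
/*
 * Illustration only: bEvil is the raw instruction immediate.  For pshufd each
 * 2-bit field of the immediate selects the source dword for the corresponding
 * destination dword:
 *
 *      RTUINT128U uDst, uSrc;
 *      uSrc.au32[0] = 0; uSrc.au32[1] = 1; uSrc.au32[2] = 2; uSrc.au32[3] = 3;
 *      iemAImpl_pshufd_u128(&uDst, &uSrc, 0x1b);  // 0x1b == 00 01 10 11b, reverses the dwords
 *      // uDst.au32[] is now { 3, 2, 1, 0 }.
 */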

/** @name Media (SSE/MMX/AVX) operation: Shift Immediate Stuff (evil)
 * @{ */
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU64,(uint64_t *puDst, uint8_t bShift));
typedef FNIEMAIMPLMEDIAPSHIFTU64 *PFNIEMAIMPLMEDIAPSHIFTU64;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU128,(PRTUINT128U puDst, uint8_t bShift));
typedef FNIEMAIMPLMEDIAPSHIFTU128 *PFNIEMAIMPLMEDIAPSHIFTU128;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU256,(PRTUINT256U puDst, uint8_t bShift));
typedef FNIEMAIMPLMEDIAPSHIFTU256 *PFNIEMAIMPLMEDIAPSHIFTU256;
FNIEMAIMPLMEDIAPSHIFTU64 iemAImpl_psllw_imm_u64, iemAImpl_pslld_imm_u64, iemAImpl_psllq_imm_u64;
FNIEMAIMPLMEDIAPSHIFTU64 iemAImpl_psrlw_imm_u64, iemAImpl_psrld_imm_u64, iemAImpl_psrlq_imm_u64;
FNIEMAIMPLMEDIAPSHIFTU64 iemAImpl_psraw_imm_u64, iemAImpl_psrad_imm_u64;
FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psllw_imm_u128, iemAImpl_pslld_imm_u128, iemAImpl_psllq_imm_u128;
FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psrlw_imm_u128, iemAImpl_psrld_imm_u128, iemAImpl_psrlq_imm_u128;
FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psraw_imm_u128, iemAImpl_psrad_imm_u128;
FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_pslldq_imm_u128, iemAImpl_psrldq_imm_u128;
/** @} */
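
/*
 * Sketch of the shift-immediate convention (contents illustrative): these
 * helpers modify the operand in place; pslldq/psrldq shift by whole bytes
 * while the others shift each element by bits:
 *
 *      RTUINT128U uReg;
 *      uReg.au64[0] = UINT64_C(0x0123456789abcdef);
 *      uReg.au64[1] = 0;
 *      iemAImpl_psllq_imm_u128(&uReg, 8);  // shifts each qword left by 8 bits
 */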

/** @name Media (SSE/MMX/AVX) operation: Move Byte Mask
 * @{ */
IEM_DECL_IMPL_DEF(void, iemAImpl_maskmovq_u64,(uint64_t *puMem, uint64_t const *puSrc, uint64_t const *puMsk));
IEM_DECL_IMPL_DEF(void, iemAImpl_maskmovdqu_u128,(PRTUINT128U puMem, PCRTUINT128U puSrc, PCRTUINT128U puMsk));
IEM_DECL_IMPL_DEF(void, iemAImpl_pmovmskb_u64,(uint64_t *pu64Dst, uint64_t const *puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_pmovmskb_u128,(uint64_t *pu64Dst, PCRTUINT128U puSrc));
#ifndef IEM_WITHOUT_ASSEMBLY
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovmskb_u256,(uint64_t *pu64Dst, PCRTUINT256U puSrc));
#endif
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovmskb_u256_fallback,(uint64_t *pu64Dst, PCRTUINT256U puSrc));
/** @} */

/** @name Media (SSE/MMX/AVX) operations: Variable Blend Packed Bytes/R32/R64.
 * @{ */
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBLENDU128,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puMask));
typedef FNIEMAIMPLBLENDU128 *PFNIEMAIMPLBLENDU128;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLAVXBLENDU128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, PCRTUINT128U puMask));
typedef FNIEMAIMPLAVXBLENDU128 *PFNIEMAIMPLAVXBLENDU128;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLAVXBLENDU256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, PCRTUINT256U puMask));
typedef FNIEMAIMPLAVXBLENDU256 *PFNIEMAIMPLAVXBLENDU256;

FNIEMAIMPLBLENDU128 iemAImpl_pblendvb_u128;
FNIEMAIMPLBLENDU128 iemAImpl_pblendvb_u128_fallback;
FNIEMAIMPLAVXBLENDU128 iemAImpl_vpblendvb_u128;
FNIEMAIMPLAVXBLENDU128 iemAImpl_vpblendvb_u128_fallback;
FNIEMAIMPLAVXBLENDU256 iemAImpl_vpblendvb_u256;
FNIEMAIMPLAVXBLENDU256 iemAImpl_vpblendvb_u256_fallback;

FNIEMAIMPLBLENDU128 iemAImpl_blendvps_u128;
FNIEMAIMPLBLENDU128 iemAImpl_blendvps_u128_fallback;
FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvps_u128;
FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvps_u128_fallback;
FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvps_u256;
FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvps_u256_fallback;

FNIEMAIMPLBLENDU128 iemAImpl_blendvpd_u128;
FNIEMAIMPLBLENDU128 iemAImpl_blendvpd_u128_fallback;
FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvpd_u128;
FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvpd_u128_fallback;
FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvpd_u256;
FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvpd_u256_fallback;
/** @} */
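
/*
 * Usage sketch: the variable blend helpers take the mask as an explicit last
 * operand rather than the implicit XMM0 register, and as with the real
 * instructions only the most significant bit of each element selects between
 * destination and source (initialization of uDst/uSrc omitted):
 *
 *      RTUINT128U uDst, uSrc, uMask;
 *      uMask.au32[0] = UINT32_C(0x80000000);               // element 0 comes from uSrc
 *      uMask.au32[1] = uMask.au32[2] = uMask.au32[3] = 0;  // the rest stay as uDst
 *      iemAImpl_blendvps_u128(&uDst, &uSrc, &uMask);
 */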


/** @name Media (SSE/MMX/AVX) operation: Sort this later
 * @{ */
IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u128_fallback,(PRTUINT128U puDst, uint16_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u128_fallback,(PRTUINT128U puDst, uint16_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_shufpd_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));

IEM_DECL_IMPL_DEF(void, iemAImpl_shufps_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));

IEM_DECL_IMPL_DEF(void, iemAImpl_palignr_u64,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t bEvil));
IEM_DECL_IMPL_DEF(void, iemAImpl_palignr_u64_fallback,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t bEvil));

IEM_DECL_IMPL_DEF(void, iemAImpl_movmskps_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u128_fallback,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u256,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u256_fallback,(uint8_t *pu8Dst, PCRTUINT256U puSrc));

IEM_DECL_IMPL_DEF(void, iemAImpl_movmskpd_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u128_fallback,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u256,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u256_fallback,(uint8_t *pu8Dst, PCRTUINT256U puSrc));


typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U128IMM8,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
typedef FNIEMAIMPLMEDIAOPTF2U128IMM8 *PFNIEMAIMPLMEDIAOPTF2U128IMM8;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U256IMM8,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil));
typedef FNIEMAIMPLMEDIAOPTF2U256IMM8 *PFNIEMAIMPLMEDIAOPTF2U256IMM8;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U128IMM8,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
typedef FNIEMAIMPLMEDIAOPTF3U128IMM8 *PFNIEMAIMPLMEDIAOPTF3U128IMM8;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U256IMM8,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
typedef FNIEMAIMPLMEDIAOPTF3U256IMM8 *PFNIEMAIMPLMEDIAOPTF3U256IMM8;

FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_palignr_u128, iemAImpl_palignr_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_pblendw_u128, iemAImpl_pblendw_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_blendps_u128, iemAImpl_blendps_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_blendpd_u128, iemAImpl_blendpd_u128_fallback;

FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpalignr_u128, iemAImpl_vpalignr_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpblendw_u128, iemAImpl_vpblendw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpblendd_u128, iemAImpl_vpblendd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vblendps_u128, iemAImpl_vblendps_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vblendpd_u128, iemAImpl_vblendpd_u128_fallback;

FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpalignr_u256, iemAImpl_vpalignr_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpblendw_u256, iemAImpl_vpblendw_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpblendd_u256, iemAImpl_vpblendd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vblendps_u256, iemAImpl_vblendps_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vblendpd_u256, iemAImpl_vblendpd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback;

FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesimc_u128, iemAImpl_aesimc_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesenc_u128, iemAImpl_aesenc_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesenclast_u128, iemAImpl_aesenclast_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesdec_u128, iemAImpl_aesdec_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesdeclast_u128, iemAImpl_aesdeclast_u128_fallback;

FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vaesimc_u128, iemAImpl_vaesimc_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesenc_u128, iemAImpl_vaesenc_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesenclast_u128, iemAImpl_vaesenclast_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesdec_u128, iemAImpl_vaesdec_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesdeclast_u128, iemAImpl_vaesdeclast_u128_fallback;

FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_aeskeygenassist_u128, iemAImpl_aeskeygenassist_u128_fallback;

FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback;

FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1nexte_u128, iemAImpl_sha1nexte_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1msg1_u128, iemAImpl_sha1msg1_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1msg2_u128, iemAImpl_sha1msg2_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha256msg1_u128, iemAImpl_sha256msg1_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha256msg2_u128, iemAImpl_sha256msg2_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_sha1rnds4_u128, iemAImpl_sha1rnds4_u128_fallback;
IEM_DECL_IMPL_DEF(void, iemAImpl_sha256rnds2_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puXmm0Constants));
IEM_DECL_IMPL_DEF(void, iemAImpl_sha256rnds2_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puXmm0Constants));

FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermq_u256, iemAImpl_vpermq_u256_fallback;
FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermpd_u256, iemAImpl_vpermpd_u256_fallback;

typedef struct IEMPCMPISTRXSRC
{
    RTUINT128U uSrc1;
    RTUINT128U uSrc2;
} IEMPCMPISTRXSRC;
typedef IEMPCMPISTRXSRC *PIEMPCMPISTRXSRC;
typedef const IEMPCMPISTRXSRC *PCIEMPCMPISTRXSRC;

typedef struct IEMPCMPESTRXSRC
{
    RTUINT128U uSrc1;
    RTUINT128U uSrc2;
    uint64_t u64Rax;
    uint64_t u64Rdx;
} IEMPCMPESTRXSRC;
typedef IEMPCMPESTRXSRC *PIEMPCMPESTRXSRC;
typedef const IEMPCMPESTRXSRC *PCIEMPCMPESTRXSRC;
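
/*
 * Sketch: the explicit-length string compares get their RAX/RDX length inputs
 * packed into IEMPCMPESTRXSRC together with the two 128-bit operands, so the
 * helpers declared below only need a single source pointer.  The uXmm1/uXmm2
 * variables and bImm8 stand in for the decoded operands/immediate:
 *
 *      IEMPCMPESTRXSRC Src;
 *      Src.uSrc1  = uXmm1;     // first string fragment
 *      Src.uSrc2  = uXmm2;     // second string fragment
 *      Src.u64Rax = 5;         // length of string 1 (RAX)
 *      Src.u64Rdx = 7;         // length of string 2 (RDX)
 *      uint32_t uEcx = 0, fEFlags = 0;
 *      iemAImpl_pcmpestri_u128(&uEcx, &fEFlags, &Src, bImm8);
 */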

typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLPCMPISTRIU128IMM8,(uint32_t *pEFlags, PCRTUINT128U pSrc1, PCRTUINT128U pSrc2, uint8_t bEvil));
typedef FNIEMAIMPLPCMPISTRIU128IMM8 *PFNIEMAIMPLPCMPISTRIU128IMM8;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPESTRIU128IMM8,(uint32_t *pu32Ecx, uint32_t *pEFlags, PCIEMPCMPESTRXSRC pSrc, uint8_t bEvil));
typedef FNIEMAIMPLPCMPESTRIU128IMM8 *PFNIEMAIMPLPCMPESTRIU128IMM8;

typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPISTRMU128IMM8,(PRTUINT128U puDst, uint32_t *pEFlags, PCIEMPCMPISTRXSRC pSrc, uint8_t bEvil));
typedef FNIEMAIMPLPCMPISTRMU128IMM8 *PFNIEMAIMPLPCMPISTRMU128IMM8;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPESTRMU128IMM8,(PRTUINT128U puDst, uint32_t *pEFlags, PCIEMPCMPESTRXSRC pSrc, uint8_t bEvil));
typedef FNIEMAIMPLPCMPESTRMU128IMM8 *PFNIEMAIMPLPCMPESTRMU128IMM8;

FNIEMAIMPLPCMPISTRIU128IMM8 iemAImpl_pcmpistri_u128, iemAImpl_pcmpistri_u128_fallback;
FNIEMAIMPLPCMPESTRIU128IMM8 iemAImpl_pcmpestri_u128, iemAImpl_pcmpestri_u128_fallback;
FNIEMAIMPLPCMPISTRMU128IMM8 iemAImpl_pcmpistrm_u128, iemAImpl_pcmpistrm_u128_fallback;
FNIEMAIMPLPCMPESTRMU128IMM8 iemAImpl_pcmpestrm_u128, iemAImpl_pcmpestrm_u128_fallback;
FNIEMAIMPLPCMPISTRIU128IMM8 iemAImpl_vpcmpistri_u128, iemAImpl_vpcmpistri_u128_fallback;
FNIEMAIMPLPCMPESTRIU128IMM8 iemAImpl_vpcmpestri_u128, iemAImpl_vpcmpestri_u128_fallback;
FNIEMAIMPLPCMPISTRMU128IMM8 iemAImpl_vpcmpistrm_u128, iemAImpl_vpcmpistrm_u128_fallback;
FNIEMAIMPLPCMPESTRMU128IMM8 iemAImpl_vpcmpestrm_u128, iemAImpl_vpcmpestrm_u128_fallback;


FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_pclmulqdq_u128, iemAImpl_pclmulqdq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback;

FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_mpsadbw_u128, iemAImpl_mpsadbw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vmpsadbw_u128, iemAImpl_vmpsadbw_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vmpsadbw_u256, iemAImpl_vmpsadbw_u256_fallback;

FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback;
FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback;
FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback;
FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback;
FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback;
FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback;
IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));

FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback;
FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback;
FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback;
FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback;

FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback;
FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback;
FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback;
FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback;
FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback;
FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback;
IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));

FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpermilps_u128, iemAImpl_vpermilps_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vpermilps_imm_u128, iemAImpl_vpermilps_imm_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermilps_u256, iemAImpl_vpermilps_u256_fallback;
FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermilps_imm_u256, iemAImpl_vpermilps_imm_u256_fallback;

FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpermilpd_u128, iemAImpl_vpermilpd_u128_fallback;
FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vpermilpd_imm_u128, iemAImpl_vpermilpd_imm_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermilpd_u256, iemAImpl_vpermilpd_u256_fallback;
FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermilpd_imm_u256, iemAImpl_vpermilpd_imm_u256_fallback;

FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllvd_u128, iemAImpl_vpsllvd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllvd_u256, iemAImpl_vpsllvd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllvq_u128, iemAImpl_vpsllvq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllvq_u256, iemAImpl_vpsllvq_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsravd_u128, iemAImpl_vpsravd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsravd_u256, iemAImpl_vpsravd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlvd_u128, iemAImpl_vpsrlvd_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlvd_u256, iemAImpl_vpsrlvd_u256_fallback;
FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlvq_u128, iemAImpl_vpsrlvq_u128_fallback;
FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlvq_u256, iemAImpl_vpsrlvq_u256_fallback;
/** @} */

/** @name Media Odds and Ends
 * @{ */
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U8,(uint32_t *puDst, uint8_t uSrc));
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U16,(uint32_t *puDst, uint16_t uSrc));
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U32,(uint32_t *puDst, uint32_t uSrc));
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U64,(uint32_t *puDst, uint64_t uSrc));
FNIEMAIMPLCR32U8 iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback;
FNIEMAIMPLCR32U16 iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback;
FNIEMAIMPLCR32U32 iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback;
FNIEMAIMPLCR32U64 iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback;
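
/*
 * Usage sketch: the CRC32 helpers accumulate into *puDst, mirroring the
 * instruction's read-modify-write of the destination register, so wider
 * buffers can be processed by chaining calls (input variables illustrative):
 *
 *      uint32_t uCrc = 0;
 *      iemAImpl_crc32_u32(&uCrc, u32Chunk);
 *      iemAImpl_crc32_u8(&uCrc, u8Tail);   // uCrc now covers both inputs
 */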

typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLF2EFL128,(PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint32_t *pEFlags));
typedef FNIEMAIMPLF2EFL128 *PFNIEMAIMPLF2EFL128;
typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLF2EFL256,(PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint32_t *pEFlags));
typedef FNIEMAIMPLF2EFL256 *PFNIEMAIMPLF2EFL256;
FNIEMAIMPLF2EFL128 iemAImpl_ptest_u128;
FNIEMAIMPLF2EFL256 iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback;
FNIEMAIMPLF2EFL128 iemAImpl_vtestps_u128, iemAImpl_vtestps_u128_fallback;
FNIEMAIMPLF2EFL256 iemAImpl_vtestps_u256, iemAImpl_vtestps_u256_fallback;
FNIEMAIMPLF2EFL128 iemAImpl_vtestpd_u128, iemAImpl_vtestpd_u128_fallback;
FNIEMAIMPLF2EFL256 iemAImpl_vtestpd_u256, iemAImpl_vtestpd_u256_fallback;

typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32U64,(uint32_t uMxCsrIn, int32_t *pi32Dst, const uint64_t *pu64Src)); /* pu64Src points to a double precision floating point value. */
typedef FNIEMAIMPLSSEF2I32U64 *PFNIEMAIMPLSSEF2I32U64;
typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64U64,(uint32_t uMxCsrIn, int64_t *pi64Dst, const uint64_t *pu64Src)); /* pu64Src points to a double precision floating point value. */
typedef FNIEMAIMPLSSEF2I64U64 *PFNIEMAIMPLSSEF2I64U64;
typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32U32,(uint32_t uMxCsrIn, int32_t *pi32Dst, const uint32_t *pu32Src)); /* pu32Src points to a single precision floating point value. */
typedef FNIEMAIMPLSSEF2I32U32 *PFNIEMAIMPLSSEF2I32U32;
typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64U32,(uint32_t uMxCsrIn, int64_t *pi64Dst, const uint32_t *pu32Src)); /* pu32Src points to a single precision floating point value. */
typedef FNIEMAIMPLSSEF2I64U32 *PFNIEMAIMPLSSEF2I64U32;
typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32R32,(uint32_t uMxCsrIn, int32_t *pi32Dst, PCRTFLOAT32U pr32Src));
typedef FNIEMAIMPLSSEF2I32R32 *PFNIEMAIMPLSSEF2I32R32;
typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64R32,(uint32_t uMxCsrIn, int64_t *pi64Dst, PCRTFLOAT32U pr32Src));
typedef FNIEMAIMPLSSEF2I64R32 *PFNIEMAIMPLSSEF2I64R32;
typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32R64,(uint32_t uMxCsrIn, int32_t *pi32Dst, PCRTFLOAT64U pr64Src));
typedef FNIEMAIMPLSSEF2I32R64 *PFNIEMAIMPLSSEF2I32R64;
typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64R64,(uint32_t uMxCsrIn, int64_t *pi64Dst, PCRTFLOAT64U pr64Src));
typedef FNIEMAIMPLSSEF2I64R64 *PFNIEMAIMPLSSEF2I64R64;

FNIEMAIMPLSSEF2I32U64 iemAImpl_cvttsd2si_i32_r64;
FNIEMAIMPLSSEF2I32U64 iemAImpl_cvtsd2si_i32_r64;

FNIEMAIMPLSSEF2I64U64 iemAImpl_cvttsd2si_i64_r64;
FNIEMAIMPLSSEF2I64U64 iemAImpl_cvtsd2si_i64_r64;

FNIEMAIMPLSSEF2I32U32 iemAImpl_cvttss2si_i32_r32;
FNIEMAIMPLSSEF2I32U32 iemAImpl_cvtss2si_i32_r32;

FNIEMAIMPLSSEF2I64U32 iemAImpl_cvttss2si_i64_r32;
FNIEMAIMPLSSEF2I64U32 iemAImpl_cvtss2si_i64_r32;
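
/*
 * Sketch of the MXCSR threading convention used by these conversion helpers:
 * the guest MXCSR goes in as the first argument and the updated MXCSR comes
 * back as the return value, leaving it to the caller to merge any raised
 * exception flags into the guest state (variables illustrative):
 *
 *      int32_t  i32Dst;
 *      uint32_t const fMxCsrOut = iemAImpl_cvttsd2si_i32_r64(fGuestMxCsr, &i32Dst, &u64Src);
 *      // the caller folds the exception flag bits of fMxCsrOut back into the guest MXCSR
 */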

FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvttss2si_i32_r32, iemAImpl_vcvttss2si_i32_r32_fallback;
FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvttss2si_i64_r32, iemAImpl_vcvttss2si_i64_r32_fallback;
FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvtss2si_i32_r32, iemAImpl_vcvtss2si_i32_r32_fallback;
FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvtss2si_i64_r32, iemAImpl_vcvtss2si_i64_r32_fallback;

FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvttss2si_i32_r64, iemAImpl_vcvttss2si_i32_r64_fallback;
FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvttss2si_i64_r64, iemAImpl_vcvttss2si_i64_r64_fallback;
FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvtss2si_i32_r64, iemAImpl_vcvtss2si_i32_r64_fallback;
FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvtss2si_i64_r64, iemAImpl_vcvtss2si_i64_r64_fallback;

FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvttsd2si_i32_r32, iemAImpl_vcvttsd2si_i32_r32_fallback;
FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvttsd2si_i64_r32, iemAImpl_vcvttsd2si_i64_r32_fallback;
FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvtsd2si_i32_r32, iemAImpl_vcvtsd2si_i32_r32_fallback;
FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvtsd2si_i64_r32, iemAImpl_vcvtsd2si_i64_r32_fallback;

FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvttsd2si_i32_r64, iemAImpl_vcvttsd2si_i32_r64_fallback;
FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvttsd2si_i64_r64, iemAImpl_vcvttsd2si_i64_r64_fallback;
FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvtsd2si_i32_r64, iemAImpl_vcvtsd2si_i32_r64_fallback;
FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvtsd2si_i64_r64, iemAImpl_vcvtsd2si_i64_r64_fallback;

typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R32I32,(uint32_t uMxCsrIn, PRTFLOAT32U pr32Dst, const int32_t *pi32Src));
typedef FNIEMAIMPLSSEF2R32I32 *PFNIEMAIMPLSSEF2R32I32;
typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R32I64,(uint32_t uMxCsrIn, PRTFLOAT32U pr32Dst, const int64_t *pi64Src));
typedef FNIEMAIMPLSSEF2R32I64 *PFNIEMAIMPLSSEF2R32I64;

FNIEMAIMPLSSEF2R32I32 iemAImpl_cvtsi2ss_r32_i32;
FNIEMAIMPLSSEF2R32I64 iemAImpl_cvtsi2ss_r32_i64;

typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLAVXF3XMMI32,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, const int32_t *pi32Src));
typedef FNIEMAIMPLAVXF3XMMI32 *PFNIEMAIMPLAVXF3XMMI32;
typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLAVXF3XMMI64,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, const int64_t *pi64Src));
typedef FNIEMAIMPLAVXF3XMMI64 *PFNIEMAIMPLAVXF3XMMI64;

FNIEMAIMPLAVXF3XMMI32 iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback;
FNIEMAIMPLAVXF3XMMI64 iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback;


typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R64I32,(uint32_t uMxCsrIn, PRTFLOAT64U pr64Dst, const int32_t *pi32Src));
typedef FNIEMAIMPLSSEF2R64I32 *PFNIEMAIMPLSSEF2R64I32;
typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R64I64,(uint32_t uMxCsrIn, PRTFLOAT64U pr64Dst, const int64_t *pi64Src));
typedef FNIEMAIMPLSSEF2R64I64 *PFNIEMAIMPLSSEF2R64I64;

FNIEMAIMPLSSEF2R64I32 iemAImpl_cvtsi2sd_r64_i32;
FNIEMAIMPLSSEF2R64I64 iemAImpl_cvtsi2sd_r64_i64;

FNIEMAIMPLAVXF3XMMI32 iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback;
FNIEMAIMPLAVXF3XMMI64 iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback;

IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u128_u64,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two packed single precision floating point values. */
IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u128_u64_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two packed single precision floating point values. */
IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u256_u128,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u256_u128_fallback,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));


IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u128_u64,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two packed 32-bit signed integers. */
IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u128_u64_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two packed 32-bit signed integers. */
IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u256_u128,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u256_u128_fallback,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));


typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLF2EFLMXCSRR32R32,(uint32_t uMxCsrIn, uint32_t *pfEFlags, RTFLOAT32U uSrc1, RTFLOAT32U uSrc2));
typedef FNIEMAIMPLF2EFLMXCSRR32R32 *PFNIEMAIMPLF2EFLMXCSRR32R32;

typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLF2EFLMXCSRR64R64,(uint32_t uMxCsrIn, uint32_t *pfEFlags, RTFLOAT64U uSrc1, RTFLOAT64U uSrc2));
typedef FNIEMAIMPLF2EFLMXCSRR64R64 *PFNIEMAIMPLF2EFLMXCSRR64R64;

FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_ucomiss_u128;
FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback;

FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_ucomisd_u128;
FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback;

FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_comiss_u128;
FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback;

FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_comisd_u128;
FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback;


typedef struct IEMMEDIAF2XMMSRC
{
    X86XMMREG uSrc1;
    X86XMMREG uSrc2;
} IEMMEDIAF2XMMSRC;
typedef IEMMEDIAF2XMMSRC *PIEMMEDIAF2XMMSRC;
typedef const IEMMEDIAF2XMMSRC *PCIEMMEDIAF2XMMSRC;
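
/*
 * Sketch: the packed/scalar compares declared below receive both sources
 * through IEMMEDIAF2XMMSRC so the helper takes a single source pointer; bEvil
 * carries the comparison predicate immediate (0 = EQ, 1 = LT, 2 = LE, ...).
 * Variables are illustrative:
 *
 *      IEMMEDIAF2XMMSRC Src;
 *      Src.uSrc1 = uXmmLeft;
 *      Src.uSrc2 = uXmmRight;
 *      X86XMMREG uResult;
 *      uint32_t const fMxCsrOut = iemAImpl_cmpps_u128(fGuestMxCsr, &uResult, &Src, 1 /* LT */);
 */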
4392
4393
4394typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3XMMIMM8,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCIEMMEDIAF2XMMSRC puSrc, uint8_t bEvil));
4395typedef FNIEMAIMPLMEDIAF3XMMIMM8 *PFNIEMAIMPLMEDIAF3XMMIMM8;
4396
4397
4398typedef struct IEMMEDIAF2YMMSRC
4399{
4400 X86YMMREG uSrc1;
4401 X86YMMREG uSrc2;
4402} IEMMEDIAF2YMMSRC;
4403typedef IEMMEDIAF2YMMSRC *PIEMMEDIAF2YMMSRC;
4404typedef const IEMMEDIAF2YMMSRC *PCIEMMEDIAF2YMMSRC;
4405
4406
4407typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3YMMIMM8,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCIEMMEDIAF2YMMSRC puSrc, uint8_t bEvil));
4408typedef FNIEMAIMPLMEDIAF3YMMIMM8 *PFNIEMAIMPLMEDIAF3YMMIMM8;
4409
4410
4411FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpps_u128;
4412FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmppd_u128;
4413FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpss_u128;
4414FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpsd_u128;
4415
4416FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpps_u128, iemAImpl_vcmpps_u128_fallback;
4417FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmppd_u128, iemAImpl_vcmppd_u128_fallback;
4418FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback;
4419FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback;
4420
4421FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vcmpps_u256, iemAImpl_vcmpps_u256_fallback;
4422FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vcmppd_u256, iemAImpl_vcmppd_u256_fallback;
4423
4424FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_roundss_u128;
4425FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_roundsd_u128;
4426
4427FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_dpps_u128, iemAImpl_dpps_u128_fallback;
4428FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_dppd_u128, iemAImpl_dppd_u128_fallback;
4429
4430
4431typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U128IMM8,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, uint8_t bEvil));
4432typedef FNIEMAIMPLMEDIAF2U128IMM8 *PFNIEMAIMPLMEDIAF2U128IMM8;
4433
4434
4435typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U256IMM8,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc, uint8_t bEvil));
4436typedef FNIEMAIMPLMEDIAF2U256IMM8 *PFNIEMAIMPLMEDIAF2U256IMM8;
4437
4438
4439FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_roundps_u128, iemAImpl_roundps_u128_fallback;
4440FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_roundpd_u128, iemAImpl_roundpd_u128_fallback;
4441
4442FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_vroundps_u128, iemAImpl_vroundps_u128_fallback;
4443FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_vroundpd_u128, iemAImpl_vroundpd_u128_fallback;
4444
4445FNIEMAIMPLMEDIAF2U256IMM8 iemAImpl_vroundps_u256, iemAImpl_vroundps_u256_fallback;
4446FNIEMAIMPLMEDIAF2U256IMM8 iemAImpl_vroundpd_u256, iemAImpl_vroundpd_u256_fallback;
4447
4448FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vroundss_u128, iemAImpl_vroundss_u128_fallback;
4449FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vroundsd_u128, iemAImpl_vroundsd_u128_fallback;
4450
4451FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vdpps_u128, iemAImpl_vdpps_u128_fallback;
4452FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vdppd_u128, iemAImpl_vdppd_u128_fallback;
4453
4454FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vdpps_u256, iemAImpl_vdpps_u256_fallback;
4455
4456
4457typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU64U128,(uint32_t fMxCsrIn, uint64_t *pu64Dst, PCX86XMMREG pSrc));
4458typedef FNIEMAIMPLMXCSRU64U128 *PFNIEMAIMPLMXCSRU64U128;
4459
4460FNIEMAIMPLMXCSRU64U128 iemAImpl_cvtpd2pi_u128;
4461FNIEMAIMPLMXCSRU64U128 iemAImpl_cvttpd2pi_u128;
4462
4463typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU128U64,(uint32_t fMxCsrIn, PX86XMMREG pDst, uint64_t u64Src));
4464typedef FNIEMAIMPLMXCSRU128U64 *PFNIEMAIMPLMXCSRU128U64;
4465
4466FNIEMAIMPLMXCSRU128U64 iemAImpl_cvtpi2ps_u128;
4467FNIEMAIMPLMXCSRU128U64 iemAImpl_cvtpi2pd_u128;
4468
4469typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU64U64,(uint32_t fMxCsrIn, uint64_t *pu64Dst, uint64_t u64Src));
4470typedef FNIEMAIMPLMXCSRU64U64 *PFNIEMAIMPLMXCSRU64U64;
4471
4472FNIEMAIMPLMXCSRU64U64 iemAImpl_cvtps2pi_u128;
4473FNIEMAIMPLMXCSRU64U64 iemAImpl_cvttps2pi_u128;
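/* Example (illustrative sketch only): the FNIEMAIMPLMXCSRU64U64 shape above
   in action for cvtps2pi, converting two packed singles to two packed
   dwords.  The source packs 1.0f (0x3f800000) and 2.0f (0x40000000) into a
   single uint64_t. */
#if 0
{
    uint64_t       u64Dst    = 0;
    uint64_t const u64Src    = UINT64_C(0x400000003f800000); /* { 1.0f, 2.0f } */
    uint32_t const fMxCsrOut = iemAImpl_cvtps2pi_u128(X86_MXCSR_XCPT_MASK, &u64Dst, u64Src);
    Assert(u64Dst == UINT64_C(0x0000000200000001));          /* { 1, 2 } */
    RT_NOREF(fMxCsrOut);
}
#endif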
4474
4475/** @} */
4476
4477
4478/** @name Function tables.
4479 * @{
4480 */
4481
4482/**
4483 * Function table for a binary operator providing implementation based on
4484 * operand size.
4485 */
4486typedef struct IEMOPBINSIZES
4487{
4488 PFNIEMAIMPLBINU8 pfnNormalU8, pfnLockedU8;
4489 PFNIEMAIMPLBINU16 pfnNormalU16, pfnLockedU16;
4490 PFNIEMAIMPLBINU32 pfnNormalU32, pfnLockedU32;
4491 PFNIEMAIMPLBINU64 pfnNormalU64, pfnLockedU64;
4492} IEMOPBINSIZES;
4493/** Pointer to a binary operator function table. */
4494typedef IEMOPBINSIZES const *PCIEMOPBINSIZES;
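/* Example (illustrative sketch only; the helper name is made up): how a
   decoder typically selects a worker from such a table, based on the
   effective operand size and whether a LOCK prefix is in effect. */
#if 0
static PFNIEMAIMPLBINU32 iemExampleSelectBinU32(PCIEMOPBINSIZES pImpl, bool fLocked)
{
    return fLocked ? pImpl->pfnLockedU32 : pImpl->pfnNormalU32;
}
#endif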
4495
4496
4497/**
4498 * Function table for a unary operator providing implementation based on
4499 * operand size.
4500 */
4501typedef struct IEMOPUNARYSIZES
4502{
4503 PFNIEMAIMPLUNARYU8 pfnNormalU8, pfnLockedU8;
4504 PFNIEMAIMPLUNARYU16 pfnNormalU16, pfnLockedU16;
4505 PFNIEMAIMPLUNARYU32 pfnNormalU32, pfnLockedU32;
4506 PFNIEMAIMPLUNARYU64 pfnNormalU64, pfnLockedU64;
4507} IEMOPUNARYSIZES;
4508/** Pointer to a unary operator function table. */
4509typedef IEMOPUNARYSIZES const *PCIEMOPUNARYSIZES;
4510
4511
4512/**
4513 * Function table for a shift operator providing implementation based on
4514 * operand size.
4515 */
4516typedef struct IEMOPSHIFTSIZES
4517{
4518 PFNIEMAIMPLSHIFTU8 pfnNormalU8;
4519 PFNIEMAIMPLSHIFTU16 pfnNormalU16;
4520 PFNIEMAIMPLSHIFTU32 pfnNormalU32;
4521 PFNIEMAIMPLSHIFTU64 pfnNormalU64;
4522} IEMOPSHIFTSIZES;
4523/** Pointer to a shift operator function table. */
4524typedef IEMOPSHIFTSIZES const *PCIEMOPSHIFTSIZES;
4525
4526
4527/**
4528 * Function table for a multiplication or division operation.
4529 */
4530typedef struct IEMOPMULDIVSIZES
4531{
4532 PFNIEMAIMPLMULDIVU8 pfnU8;
4533 PFNIEMAIMPLMULDIVU16 pfnU16;
4534 PFNIEMAIMPLMULDIVU32 pfnU32;
4535 PFNIEMAIMPLMULDIVU64 pfnU64;
4536} IEMOPMULDIVSIZES;
4537/** Pointer to a multiplication or division operation function table. */
4538typedef IEMOPMULDIVSIZES const *PCIEMOPMULDIVSIZES;
4539
4540
4541/**
4542 * Function table for a double precision shift operator providing implementation
4543 * based on operand size.
4544 */
4545typedef struct IEMOPSHIFTDBLSIZES
4546{
4547 PFNIEMAIMPLSHIFTDBLU16 pfnNormalU16;
4548 PFNIEMAIMPLSHIFTDBLU32 pfnNormalU32;
4549 PFNIEMAIMPLSHIFTDBLU64 pfnNormalU64;
4550} IEMOPSHIFTDBLSIZES;
4551/** Pointer to a double precision shift function table. */
4552typedef IEMOPSHIFTDBLSIZES const *PCIEMOPSHIFTDBLSIZES;
4553
4554
4555/**
4556 * Function table for a media instruction taking two full sized media source
4557 * registers and one full sized destination register (AVX).
4558 */
4559typedef struct IEMOPMEDIAF3
4560{
4561 PFNIEMAIMPLMEDIAF3U128 pfnU128;
4562 PFNIEMAIMPLMEDIAF3U256 pfnU256;
4563} IEMOPMEDIAF3;
4564/** Pointer to a media operation function table for 3 full sized ops (AVX). */
4565typedef IEMOPMEDIAF3 const *PCIEMOPMEDIAF3;
4566
4567/** @def IEMOPMEDIAF3_INIT_VARS_EX
4568 * Declares an s_Host (x86 & amd64 only) and an s_Fallback variable with the
4569 * given functions as initializers. For use in AVX functions where a pair of
4570 * functions are only used once and the function table need not be public. */
4571#ifndef TST_IEM_CHECK_MC
4572# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
4573# define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4574 static IEMOPMEDIAF3 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
4575 static IEMOPMEDIAF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4576# else
4577# define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4578 static IEMOPMEDIAF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4579# endif
4580#else
4581# define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
4582#endif
4583/** @def IEMOPMEDIAF3_INIT_VARS
4584 * Generate AVX function tables for the @a a_InstrNm instruction.
4585 * @sa IEMOPMEDIAF3_INIT_VARS_EX */
4586#define IEMOPMEDIAF3_INIT_VARS(a_InstrNm) \
4587 IEMOPMEDIAF3_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
4588 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
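/* For instance, IEMOPMEDIAF3_INIT_VARS(vaddps) expands (on x86/amd64 hosts
   with assembly enabled) to roughly:
        static IEMOPMEDIAF3 const s_Host     = { iemAImpl_vaddps_u128,          iemAImpl_vaddps_u256 };
        static IEMOPMEDIAF3 const s_Fallback = { iemAImpl_vaddps_u128_fallback, iemAImpl_vaddps_u256_fallback };
   and to just the s_Fallback table on other hosts. */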
4589
4590
4591/**
4592 * Function table for a media instruction taking one full sized media source
4593 * register and one full sized destination register (AVX).
4594 */
4595typedef struct IEMOPMEDIAF2
4596{
4597 PFNIEMAIMPLMEDIAF2U128 pfnU128;
4598 PFNIEMAIMPLMEDIAF2U256 pfnU256;
4599} IEMOPMEDIAF2;
4600/** Pointer to a media operation function table for 2 full sized ops (AVX). */
4601typedef IEMOPMEDIAF2 const *PCIEMOPMEDIAF2;
4602
4603/** @def IEMOPMEDIAF2_INIT_VARS_EX
4604 * Declares an s_Host (x86 & amd64 only) and an s_Fallback variable with the
4605 * given functions as initializers. For use in AVX functions where a pair of
4606 * functions are only used once and the function table need not be public. */
4607#ifndef TST_IEM_CHECK_MC
4608# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
4609# define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4610 static IEMOPMEDIAF2 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
4611 static IEMOPMEDIAF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4612# else
4613# define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4614 static IEMOPMEDIAF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4615# endif
4616#else
4617# define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
4618#endif
4619/** @def IEMOPMEDIAF2_INIT_VARS
4620 * Generate AVX function tables for the @a a_InstrNm instruction.
4621 * @sa IEMOPMEDIAF2_INIT_VARS_EX */
4622#define IEMOPMEDIAF2_INIT_VARS(a_InstrNm) \
4623 IEMOPMEDIAF2_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
4624 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
4625
4626
4627/**
4628 * Function table for a media instruction taking two full sized media source
4629 * registers and one full sized destination register, but no additional state
4630 * (AVX).
4631 */
4632typedef struct IEMOPMEDIAOPTF3
4633{
4634 PFNIEMAIMPLMEDIAOPTF3U128 pfnU128;
4635 PFNIEMAIMPLMEDIAOPTF3U256 pfnU256;
4636} IEMOPMEDIAOPTF3;
4637/** Pointer to a media operation function table for 3 full sized ops (AVX). */
4638typedef IEMOPMEDIAOPTF3 const *PCIEMOPMEDIAOPTF3;
4639
4640/** @def IEMOPMEDIAOPTF3_INIT_VARS_EX
4641 * Declares an s_Host (x86 & amd64 only) and an s_Fallback variable with the
4642 * given functions as initializers. For use in AVX functions where a pair of
4643 * functions are only used once and the function table need not be public. */
4644#ifndef TST_IEM_CHECK_MC
4645# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
4646# define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4647 static IEMOPMEDIAOPTF3 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
4648 static IEMOPMEDIAOPTF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4649# else
4650# define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4651 static IEMOPMEDIAOPTF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4652# endif
4653#else
4654# define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
4655#endif
4656/** @def IEMOPMEDIAOPTF3_INIT_VARS
4657 * Generate AVX function tables for the @a a_InstrNm instruction.
4658 * @sa IEMOPMEDIAOPTF3_INIT_VARS_EX */
4659#define IEMOPMEDIAOPTF3_INIT_VARS(a_InstrNm) \
4660 IEMOPMEDIAOPTF3_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
4661 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
4662
4663/**
4664 * Function table for a media instruction taking one full sized media source
4665 * register and one full sized destination register, but no additional state
4666 * (AVX).
4667 */
4668typedef struct IEMOPMEDIAOPTF2
4669{
4670 PFNIEMAIMPLMEDIAOPTF2U128 pfnU128;
4671 PFNIEMAIMPLMEDIAOPTF2U256 pfnU256;
4672} IEMOPMEDIAOPTF2;
4673/** Pointer to a media operation function table for 2 full sized ops (AVX). */
4674typedef IEMOPMEDIAOPTF2 const *PCIEMOPMEDIAOPTF2;
4675
4676/** @def IEMOPMEDIAOPTF2_INIT_VARS_EX
4677 * Declares an s_Host (x86 & amd64 only) and an s_Fallback variable with the
4678 * given functions as initializers. For use in AVX functions where a pair of
4679 * functions are only used once and the function table need not be public. */
4680#ifndef TST_IEM_CHECK_MC
4681# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
4682# define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4683 static IEMOPMEDIAOPTF2 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
4684 static IEMOPMEDIAOPTF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4685# else
4686# define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4687 static IEMOPMEDIAOPTF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4688# endif
4689#else
4690# define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
4691#endif
4692/** @def IEMOPMEDIAOPTF2_INIT_VARS
4693 * Generate AVX function tables for the @a a_InstrNm instruction.
4694 * @sa IEMOPMEDIAOPTF2_INIT_VARS_EX */
4695#define IEMOPMEDIAOPTF2_INIT_VARS(a_InstrNm) \
4696 IEMOPMEDIAOPTF2_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
4697 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
4698
4699
4700/**
4701 * Function table for a media instruction taking one full sized media source
4702 * register and one full sized destination register and an 8-bit immediate (AVX).
4703 */
4704typedef struct IEMOPMEDIAF2IMM8
4705{
4706 PFNIEMAIMPLMEDIAF2U128IMM8 pfnU128;
4707 PFNIEMAIMPLMEDIAF2U256IMM8 pfnU256;
4708} IEMOPMEDIAF2IMM8;
4709/** Pointer to a media operation function table for 2 full sized ops (AVX). */
4710typedef IEMOPMEDIAF2IMM8 const *PCIEMOPMEDIAF2IMM8;
4711
4712/** @def IEMOPMEDIAF2IMM8_INIT_VARS_EX
4713 * Declares an s_Host (x86 & amd64 only) and an s_Fallback variable with the
4714 * given functions as initializers. For use in AVX functions where a pair of
4715 * functions are only used once and the function table need not be public. */
4716#ifndef TST_IEM_CHECK_MC
4717# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
4718# define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4719 static IEMOPMEDIAF2IMM8 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
4720 static IEMOPMEDIAF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4721# else
4722# define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4723 static IEMOPMEDIAF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4724# endif
4725#else
4726# define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
4727#endif
4728/** @def IEMOPMEDIAF2IMM8_INIT_VARS
4729 * Generate AVX function tables for the @a a_InstrNm instruction.
4730 * @sa IEMOPMEDIAF2IMM8_INIT_VARS_EX */
4731#define IEMOPMEDIAF2IMM8_INIT_VARS(a_InstrNm) \
4732 IEMOPMEDIAF2IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
4733 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
4734
4735
4736/**
4737 * Function table for a media instruction taking one full sized media source
4738 * register and one full sized destination register and an 8-bit immediate, but no additional state
4739 * (AVX).
4740 */
4741typedef struct IEMOPMEDIAOPTF2IMM8
4742{
4743 PFNIEMAIMPLMEDIAOPTF2U128IMM8 pfnU128;
4744 PFNIEMAIMPLMEDIAOPTF2U256IMM8 pfnU256;
4745} IEMOPMEDIAOPTF2IMM8;
4746/** Pointer to a media operation function table for 2 full sized ops (AVX). */
4747typedef IEMOPMEDIAOPTF2IMM8 const *PCIEMOPMEDIAOPTF2IMM8;
4748
4749/** @def IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX
4750 * Declares an s_Host (x86 & amd64 only) and an s_Fallback variable with the
4751 * given functions as initializers. For use in AVX functions where a pair of
4752 * functions are only used once and the function table need not be public. */
4753#ifndef TST_IEM_CHECK_MC
4754# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
4755# define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4756 static IEMOPMEDIAOPTF2IMM8 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
4757 static IEMOPMEDIAOPTF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4758# else
4759# define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4760 static IEMOPMEDIAOPTF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4761# endif
4762#else
4763# define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
4764#endif
4765/** @def IEMOPMEDIAOPTF2IMM8_INIT_VARS
4766 * Generate AVX function tables for the @a a_InstrNm instruction.
4767 * @sa IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX */
4768#define IEMOPMEDIAOPTF2IMM8_INIT_VARS(a_InstrNm) \
4769 IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u256),\
4770 RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u256_fallback))
4771
4772/**
4773 * Function table for a media instruction taking two full sized media source
4774 * registers and one full sized destination register and an 8-bit immediate, but no additional state
4775 * (AVX).
4776 */
4777typedef struct IEMOPMEDIAOPTF3IMM8
4778{
4779 PFNIEMAIMPLMEDIAOPTF3U128IMM8 pfnU128;
4780 PFNIEMAIMPLMEDIAOPTF3U256IMM8 pfnU256;
4781} IEMOPMEDIAOPTF3IMM8;
4782/** Pointer to a media operation function table for 3 full sized ops (AVX). */
4783typedef IEMOPMEDIAOPTF3IMM8 const *PCIEMOPMEDIAOPTF3IMM8;
4784
4785/** @def IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX
4786 * Declares an s_Host (x86 & amd64 only) and an s_Fallback variable with the
4787 * given functions as initializers. For use in AVX functions where a pair of
4788 * functions are only used once and the function table need not be public. */
4789#ifndef TST_IEM_CHECK_MC
4790# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
4791# define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4792 static IEMOPMEDIAOPTF3IMM8 const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
4793 static IEMOPMEDIAOPTF3IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4794# else
4795# define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4796 static IEMOPMEDIAOPTF3IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4797# endif
4798#else
4799# define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
4800#endif
4801/** @def IEMOPMEDIAOPTF3IMM8_INIT_VARS
4802 * Generate AVX function tables for the @a a_InstrNm instruction.
4803 * @sa IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX */
4804#define IEMOPMEDIAOPTF3IMM8_INIT_VARS(a_InstrNm) \
4805 IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
4806 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
4807/** @} */
4808
4809
4810/**
4811 * Function table for a blend type instruction taking three full sized media source
4812 * registers and one full sized destination register, but no additional state
4813 * (AVX).
4814 */
4815typedef struct IEMOPBLENDOP
4816{
4817 PFNIEMAIMPLAVXBLENDU128 pfnU128;
4818 PFNIEMAIMPLAVXBLENDU256 pfnU256;
4819} IEMOPBLENDOP;
4820/** Pointer to a media operation function table for 4 full sized ops (AVX). */
4821typedef IEMOPBLENDOP const *PCIEMOPBLENDOP;
4822
4823/** @def IEMOPBLENDOP_INIT_VARS_EX
4824 * Declares an s_Host (x86 & amd64 only) and an s_Fallback variable with the
4825 * given functions as initializers. For use in AVX functions where a pair of
4826 * functions are only used once and the function table need not be public. */
4827#ifndef TST_IEM_CHECK_MC
4828# if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
4829# define IEMOPBLENDOP_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4830 static IEMOPBLENDOP const s_Host = { a_pfnHostU128, a_pfnHostU256 }; \
4831 static IEMOPBLENDOP const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4832# else
4833# define IEMOPBLENDOP_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
4834 static IEMOPBLENDOP const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
4835# endif
4836#else
4837# define IEMOPBLENDOP_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
4838#endif
4839/** @def IEMOPBLENDOP_INIT_VARS
4840 * Generate AVX function tables for the @a a_InstrNm instruction.
4841 * @sa IEMOPBLENDOP_INIT_VARS_EX */
4842#define IEMOPBLENDOP_INIT_VARS(a_InstrNm) \
4843 IEMOPBLENDOP_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
4844 RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback), RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
4845
4846
4847/** @name SSE/AVX single/double precision floating point operations.
4848 * @{ */
4849typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
4850typedef FNIEMAIMPLFPSSEF2U128 *PFNIEMAIMPLFPSSEF2U128;
4851typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128R32,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2));
4852typedef FNIEMAIMPLFPSSEF2U128R32 *PFNIEMAIMPLFPSSEF2U128R32;
4853typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128R64,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2));
4854typedef FNIEMAIMPLFPSSEF2U128R64 *PFNIEMAIMPLFPSSEF2U128R64;
4855
4856typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
4857typedef FNIEMAIMPLFPAVXF3U128 *PFNIEMAIMPLFPAVXF3U128;
4858typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128R32,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2));
4859typedef FNIEMAIMPLFPAVXF3U128R32 *PFNIEMAIMPLFPAVXF3U128R32;
4860typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128R64,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2));
4861typedef FNIEMAIMPLFPAVXF3U128R64 *PFNIEMAIMPLFPAVXF3U128R64;
4862
4863typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U256,(uint32_t uMxCsrIn, PX86YMMREG pResult, PCX86YMMREG puSrc1, PCX86YMMREG puSrc2));
4864typedef FNIEMAIMPLFPAVXF3U256 *PFNIEMAIMPLFPAVXF3U256;
4865
4866FNIEMAIMPLFPSSEF2U128 iemAImpl_addps_u128;
4867FNIEMAIMPLFPSSEF2U128 iemAImpl_addpd_u128;
4868FNIEMAIMPLFPSSEF2U128 iemAImpl_mulps_u128;
4869FNIEMAIMPLFPSSEF2U128 iemAImpl_mulpd_u128;
4870FNIEMAIMPLFPSSEF2U128 iemAImpl_subps_u128;
4871FNIEMAIMPLFPSSEF2U128 iemAImpl_subpd_u128;
4872FNIEMAIMPLFPSSEF2U128 iemAImpl_minps_u128;
4873FNIEMAIMPLFPSSEF2U128 iemAImpl_minpd_u128;
4874FNIEMAIMPLFPSSEF2U128 iemAImpl_divps_u128;
4875FNIEMAIMPLFPSSEF2U128 iemAImpl_divpd_u128;
4876FNIEMAIMPLFPSSEF2U128 iemAImpl_maxps_u128;
4877FNIEMAIMPLFPSSEF2U128 iemAImpl_maxpd_u128;
4878FNIEMAIMPLFPSSEF2U128 iemAImpl_haddps_u128;
4879FNIEMAIMPLFPSSEF2U128 iemAImpl_haddpd_u128;
4880FNIEMAIMPLFPSSEF2U128 iemAImpl_hsubps_u128;
4881FNIEMAIMPLFPSSEF2U128 iemAImpl_hsubpd_u128;
4882FNIEMAIMPLFPSSEF2U128 iemAImpl_sqrtps_u128;
4883FNIEMAIMPLFPSSEF2U128 iemAImpl_rsqrtps_u128;
4884FNIEMAIMPLFPSSEF2U128 iemAImpl_sqrtpd_u128;
4885FNIEMAIMPLFPSSEF2U128 iemAImpl_rcpps_u128;
4886FNIEMAIMPLFPSSEF2U128 iemAImpl_addsubps_u128;
4887FNIEMAIMPLFPSSEF2U128 iemAImpl_addsubpd_u128;
4888
4889FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtpd2ps_u128;
4890IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_cvtps2pd_u128,(uint32_t uMxCsrIn, PX86XMMREG pResult, uint64_t const *pu64Src));
4891
4892FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtdq2ps_u128;
4893FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtps2dq_u128;
4894FNIEMAIMPLFPSSEF2U128 iemAImpl_cvttps2dq_u128;
4895FNIEMAIMPLFPSSEF2U128 iemAImpl_cvttpd2dq_u128;
4896FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtdq2pd_u128;
4897FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtpd2dq_u128;
4898
4899FNIEMAIMPLFPSSEF2U128R32 iemAImpl_addss_u128_r32;
4900FNIEMAIMPLFPSSEF2U128R64 iemAImpl_addsd_u128_r64;
4901FNIEMAIMPLFPSSEF2U128R32 iemAImpl_mulss_u128_r32;
4902FNIEMAIMPLFPSSEF2U128R64 iemAImpl_mulsd_u128_r64;
4903FNIEMAIMPLFPSSEF2U128R32 iemAImpl_subss_u128_r32;
4904FNIEMAIMPLFPSSEF2U128R64 iemAImpl_subsd_u128_r64;
4905FNIEMAIMPLFPSSEF2U128R32 iemAImpl_minss_u128_r32;
4906FNIEMAIMPLFPSSEF2U128R64 iemAImpl_minsd_u128_r64;
4907FNIEMAIMPLFPSSEF2U128R32 iemAImpl_divss_u128_r32;
4908FNIEMAIMPLFPSSEF2U128R64 iemAImpl_divsd_u128_r64;
4909FNIEMAIMPLFPSSEF2U128R32 iemAImpl_maxss_u128_r32;
4910FNIEMAIMPLFPSSEF2U128R64 iemAImpl_maxsd_u128_r64;
4911FNIEMAIMPLFPSSEF2U128R32 iemAImpl_cvtss2sd_u128_r32;
4912FNIEMAIMPLFPSSEF2U128R64 iemAImpl_cvtsd2ss_u128_r64;
4913FNIEMAIMPLFPSSEF2U128R32 iemAImpl_sqrtss_u128_r32;
4914FNIEMAIMPLFPSSEF2U128R64 iemAImpl_sqrtsd_u128_r64;
4915FNIEMAIMPLFPSSEF2U128R32 iemAImpl_rsqrtss_u128_r32;
4916FNIEMAIMPLFPSSEF2U128R32 iemAImpl_rcpss_u128_r32;
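/* Example (illustrative sketch only): an FNIEMAIMPLFPSSEF2U128R32 worker in
   action, adding a 32-bit float to the low lane of an XMM value.  Assumes
   the RTFLOAT32U 'r' float member. */
#if 0
{
    X86XMMREG  Dst, Src1;
    RTFLOAT32U r32Src2;
    RT_ZERO(Dst);
    RT_ZERO(Src1);
    r32Src2.r = 1.5f;
    uint32_t const fMxCsrOut = iemAImpl_addss_u128_r32(X86_MXCSR_XCPT_MASK, &Dst, &Src1, &r32Src2);
    RT_NOREF(fMxCsrOut); /* 0.0f + 1.5f = 1.5f lands in the low lane, upper lanes come from Src1 */
}
#endif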
4917
4918FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddps_u128, iemAImpl_vaddps_u128_fallback;
4919FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddpd_u128, iemAImpl_vaddpd_u128_fallback;
4920FNIEMAIMPLMEDIAF3U128 iemAImpl_vmulps_u128, iemAImpl_vmulps_u128_fallback;
4921FNIEMAIMPLMEDIAF3U128 iemAImpl_vmulpd_u128, iemAImpl_vmulpd_u128_fallback;
4922FNIEMAIMPLMEDIAF3U128 iemAImpl_vsubps_u128, iemAImpl_vsubps_u128_fallback;
4923FNIEMAIMPLMEDIAF3U128 iemAImpl_vsubpd_u128, iemAImpl_vsubpd_u128_fallback;
4924FNIEMAIMPLMEDIAF3U128 iemAImpl_vminps_u128, iemAImpl_vminps_u128_fallback;
4925FNIEMAIMPLMEDIAF3U128 iemAImpl_vminpd_u128, iemAImpl_vminpd_u128_fallback;
4926FNIEMAIMPLMEDIAF3U128 iemAImpl_vdivps_u128, iemAImpl_vdivps_u128_fallback;
4927FNIEMAIMPLMEDIAF3U128 iemAImpl_vdivpd_u128, iemAImpl_vdivpd_u128_fallback;
4928FNIEMAIMPLMEDIAF3U128 iemAImpl_vmaxps_u128, iemAImpl_vmaxps_u128_fallback;
4929FNIEMAIMPLMEDIAF3U128 iemAImpl_vmaxpd_u128, iemAImpl_vmaxpd_u128_fallback;
4930FNIEMAIMPLMEDIAF3U128 iemAImpl_vhaddps_u128, iemAImpl_vhaddps_u128_fallback;
4931FNIEMAIMPLMEDIAF3U128 iemAImpl_vhaddpd_u128, iemAImpl_vhaddpd_u128_fallback;
4932FNIEMAIMPLMEDIAF3U128 iemAImpl_vhsubps_u128, iemAImpl_vhsubps_u128_fallback;
4933FNIEMAIMPLMEDIAF3U128 iemAImpl_vhsubpd_u128, iemAImpl_vhsubpd_u128_fallback;
4934FNIEMAIMPLMEDIAF2U128 iemAImpl_vsqrtps_u128, iemAImpl_vsqrtps_u128_fallback;
4935FNIEMAIMPLMEDIAF2U128 iemAImpl_vsqrtpd_u128, iemAImpl_vsqrtpd_u128_fallback;
4936FNIEMAIMPLMEDIAF2U128 iemAImpl_vrsqrtps_u128, iemAImpl_vrsqrtps_u128_fallback;
4937FNIEMAIMPLMEDIAF2U128 iemAImpl_vrcpps_u128, iemAImpl_vrcpps_u128_fallback;
4938FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddsubps_u128, iemAImpl_vaddsubps_u128_fallback;
4939FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddsubpd_u128, iemAImpl_vaddsubpd_u128_fallback;
4940FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvtdq2ps_u128, iemAImpl_vcvtdq2ps_u128_fallback;
4941FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvtps2dq_u128, iemAImpl_vcvtps2dq_u128_fallback;
4942FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvttps2dq_u128, iemAImpl_vcvttps2dq_u128_fallback;
4943IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
4944IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
4945IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
4946IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
4947IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
4948IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
4949
4950
4951FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback;
4952FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback;
4953FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback;
4954FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback;
4955FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback;
4956FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback;
4957FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback;
4958FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback;
4959FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback;
4960FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback;
4961FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback;
4962FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback;
4963FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback;
4964FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback;
4965FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback;
4966FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vrcpss_u128_r32, iemAImpl_vrcpss_u128_r32_fallback;
4967FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vcvtss2sd_u128_r32, iemAImpl_vcvtss2sd_u128_r32_fallback;
4968FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vcvtsd2ss_u128_r64, iemAImpl_vcvtsd2ss_u128_r64_fallback;
4969
4970
4971FNIEMAIMPLFPAVXF3U256 iemAImpl_vaddps_u256, iemAImpl_vaddps_u256_fallback;
4972FNIEMAIMPLFPAVXF3U256 iemAImpl_vaddpd_u256, iemAImpl_vaddpd_u256_fallback;
4973FNIEMAIMPLFPAVXF3U256 iemAImpl_vmulps_u256, iemAImpl_vmulps_u256_fallback;
4974FNIEMAIMPLFPAVXF3U256 iemAImpl_vmulpd_u256, iemAImpl_vmulpd_u256_fallback;
4975FNIEMAIMPLFPAVXF3U256 iemAImpl_vsubps_u256, iemAImpl_vsubps_u256_fallback;
4976FNIEMAIMPLFPAVXF3U256 iemAImpl_vsubpd_u256, iemAImpl_vsubpd_u256_fallback;
4977FNIEMAIMPLFPAVXF3U256 iemAImpl_vminps_u256, iemAImpl_vminps_u256_fallback;
4978FNIEMAIMPLFPAVXF3U256 iemAImpl_vminpd_u256, iemAImpl_vminpd_u256_fallback;
4979FNIEMAIMPLFPAVXF3U256 iemAImpl_vdivps_u256, iemAImpl_vdivps_u256_fallback;
4980FNIEMAIMPLFPAVXF3U256 iemAImpl_vdivpd_u256, iemAImpl_vdivpd_u256_fallback;
4981FNIEMAIMPLFPAVXF3U256 iemAImpl_vmaxps_u256, iemAImpl_vmaxps_u256_fallback;
4982FNIEMAIMPLFPAVXF3U256 iemAImpl_vmaxpd_u256, iemAImpl_vmaxpd_u256_fallback;
4983FNIEMAIMPLFPAVXF3U256 iemAImpl_vhaddps_u256, iemAImpl_vhaddps_u256_fallback;
4984FNIEMAIMPLFPAVXF3U256 iemAImpl_vhaddpd_u256, iemAImpl_vhaddpd_u256_fallback;
4985FNIEMAIMPLFPAVXF3U256 iemAImpl_vhsubps_u256, iemAImpl_vhsubps_u256_fallback;
4986FNIEMAIMPLFPAVXF3U256 iemAImpl_vhsubpd_u256, iemAImpl_vhsubpd_u256_fallback;
4987FNIEMAIMPLMEDIAF3U256 iemAImpl_vaddsubps_u256, iemAImpl_vaddsubps_u256_fallback;
4988FNIEMAIMPLMEDIAF3U256 iemAImpl_vaddsubpd_u256, iemAImpl_vaddsubpd_u256_fallback;
4989FNIEMAIMPLMEDIAF2U256 iemAImpl_vsqrtps_u256, iemAImpl_vsqrtps_u256_fallback;
4990FNIEMAIMPLMEDIAF2U256 iemAImpl_vsqrtpd_u256, iemAImpl_vsqrtpd_u256_fallback;
4991FNIEMAIMPLMEDIAF2U256 iemAImpl_vrsqrtps_u256, iemAImpl_vrsqrtps_u256_fallback;
4992FNIEMAIMPLMEDIAF2U256 iemAImpl_vrcpps_u256, iemAImpl_vrcpps_u256_fallback;
4993FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvtdq2ps_u256, iemAImpl_vcvtdq2ps_u256_fallback;
4994FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvtps2dq_u256, iemAImpl_vcvtps2dq_u256_fallback;
4995FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvttps2dq_u256, iemAImpl_vcvttps2dq_u256_fallback;
4996IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
4997IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
4998IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
4999IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
5000IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
5001IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
5002/** @} */
5003
5004/** @name C instruction implementations for anything slightly complicated.
5005 * @{ */
5006
5007/**
5008 * For typedef'ing or declaring a C instruction implementation function taking
5009 * no extra arguments.
5010 *
5011 * @param a_Name The name of the type.
5012 */
5013# define IEM_CIMPL_DECL_TYPE_0(a_Name) \
5014 IEM_DECL_IMPL_TYPE(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr))
5015/**
5016 * For defining a C instruction implementation function taking no extra
5017 * arguments.
5018 *
5019 * @param a_Name The name of the function
5020 */
5021# define IEM_CIMPL_DEF_0(a_Name) \
5022 IEM_DECL_IMPL_DEF(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr))
5023/**
5024 * Prototype version of IEM_CIMPL_DEF_0.
5025 */
5026# define IEM_CIMPL_PROTO_0(a_Name) \
5027 IEM_DECL_IMPL_PROTO(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr))
5028/**
5029 * For calling a C instruction implementation function taking no extra
5030 * arguments.
5031 *
5032 * This special call macro adds default arguments to the call and allows us to
5033 * change these later.
5034 *
5035 * @param a_fn The name of the function.
5036 */
5037# define IEM_CIMPL_CALL_0(a_fn) a_fn(pVCpu, cbInstr)
5038
5039/** Type for a C instruction implementation function taking no extra
5040 * arguments. */
5041typedef IEM_CIMPL_DECL_TYPE_0(FNIEMCIMPL0);
5042/** Function pointer type for a C instruction implementation function taking
5043 * no extra arguments. */
5044typedef FNIEMCIMPL0 *PFNIEMCIMPL0;
5045
5046/**
5047 * For typedef'ing or declaring a C instruction implementation function taking
5048 * one extra argument.
5049 *
5050 * @param a_Name The name of the type.
5051 * @param a_Type0 The argument type.
5052 * @param a_Arg0 The argument name.
5053 */
5054# define IEM_CIMPL_DECL_TYPE_1(a_Name, a_Type0, a_Arg0) \
5055 IEM_DECL_IMPL_TYPE(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0))
5056/**
5057 * For defining a C instruction implementation function taking one extra
5058 * argument.
5059 *
5060 * @param a_Name The name of the function
5061 * @param a_Type0 The argument type.
5062 * @param a_Arg0 The argument name.
5063 */
5064# define IEM_CIMPL_DEF_1(a_Name, a_Type0, a_Arg0) \
5065 IEM_DECL_IMPL_DEF(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0))
5066/**
5067 * Prototype version of IEM_CIMPL_DEF_1.
5068 */
5069# define IEM_CIMPL_PROTO_1(a_Name, a_Type0, a_Arg0) \
5070 IEM_DECL_IMPL_PROTO(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0))
5071/**
5072 * For calling a C instruction implementation function taking one extra
5073 * argument.
5074 *
5075 * This special call macro adds default arguments to the call and allows us to
5076 * change these later.
5077 *
5078 * @param a_fn The name of the function.
5079 * @param a0 The name of the 1st argument.
5080 */
5081# define IEM_CIMPL_CALL_1(a_fn, a0) a_fn(pVCpu, cbInstr, (a0))
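/* Example (illustrative sketch only; iemCImplExample is a made-up name):
   defining a one-argument C implementation function with the macros above
   and invoking it from a context where pVCpu and cbInstr are in scope. */
#if 0
IEM_CIMPL_DEF_1(iemCImplExample, uint16_t, uValue)
{
    RT_NOREF(pVCpu, cbInstr, uValue);
    return VINF_SUCCESS; /* real workers also advance RIP, raise #GP, etc. */
}
/* ... in the caller: VBOXSTRICTRC rcStrict = IEM_CIMPL_CALL_1(iemCImplExample, 42); */
#endif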
5082
5083/**
5084 * For typedef'ing or declaring a C instruction implementation function taking
5085 * two extra arguments.
5086 *
5087 * @param a_Name The name of the type.
5088 * @param a_Type0 The type of the 1st argument
5089 * @param a_Arg0 The name of the 1st argument.
5090 * @param a_Type1 The type of the 2nd argument.
5091 * @param a_Arg1 The name of the 2nd argument.
5092 */
5093# define IEM_CIMPL_DECL_TYPE_2(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1) \
5094 IEM_DECL_IMPL_TYPE(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1))
5095/**
5096 * For defining a C instruction implementation function taking two extra
5097 * arguments.
5098 *
5099 * @param a_Name The name of the function.
5100 * @param a_Type0 The type of the 1st argument
5101 * @param a_Arg0 The name of the 1st argument.
5102 * @param a_Type1 The type of the 2nd argument.
5103 * @param a_Arg1 The name of the 2nd argument.
5104 */
5105# define IEM_CIMPL_DEF_2(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1) \
5106 IEM_DECL_IMPL_DEF(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1))
5107/**
5108 * Prototype version of IEM_CIMPL_DEF_2.
5109 */
5110# define IEM_CIMPL_PROTO_2(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1) \
5111 IEM_DECL_IMPL_PROTO(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1))
5112/**
5113 * For calling a C instruction implementation function taking two extra
5114 * arguments.
5115 *
5116 * This special call macro adds default arguments to the call and allows us to
5117 * change these later.
5118 *
5119 * @param a_fn The name of the function.
5120 * @param a0 The name of the 1st argument.
5121 * @param a1 The name of the 2nd argument.
5122 */
5123# define IEM_CIMPL_CALL_2(a_fn, a0, a1) a_fn(pVCpu, cbInstr, (a0), (a1))
5124
5125/**
5126 * For typedef'ing or declaring a C instruction implementation function taking
5127 * three extra arguments.
5128 *
5129 * @param a_Name The name of the type.
5130 * @param a_Type0 The type of the 1st argument
5131 * @param a_Arg0 The name of the 1st argument.
5132 * @param a_Type1 The type of the 2nd argument.
5133 * @param a_Arg1 The name of the 2nd argument.
5134 * @param a_Type2 The type of the 3rd argument.
5135 * @param a_Arg2 The name of the 3rd argument.
5136 */
5137# define IEM_CIMPL_DECL_TYPE_3(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1, a_Type2, a_Arg2) \
5138 IEM_DECL_IMPL_TYPE(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1, a_Type2 a_Arg2))
5139/**
5140 * For defining a C instruction implementation function taking three extra
5141 * arguments.
5142 *
5143 * @param a_Name The name of the function.
5144 * @param a_Type0 The type of the 1st argument
5145 * @param a_Arg0 The name of the 1st argument.
5146 * @param a_Type1 The type of the 2nd argument.
5147 * @param a_Arg1 The name of the 2nd argument.
5148 * @param a_Type2 The type of the 3rd argument.
5149 * @param a_Arg2 The name of the 3rd argument.
5150 */
5151# define IEM_CIMPL_DEF_3(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1, a_Type2, a_Arg2) \
5152 IEM_DECL_IMPL_DEF(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1, a_Type2 a_Arg2))
5153/**
5154 * Prototype version of IEM_CIMPL_DEF_3.
5155 */
5156# define IEM_CIMPL_PROTO_3(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1, a_Type2, a_Arg2) \
5157 IEM_DECL_IMPL_PROTO(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1, a_Type2 a_Arg2))
5158/**
5159 * For calling a C instruction implementation function taking three extra
5160 * arguments.
5161 *
5162 * This special call macro adds default arguments to the call and allows us to
5163 * change these later.
5164 *
5165 * @param a_fn The name of the function.
5166 * @param a0 The name of the 1st argument.
5167 * @param a1 The name of the 2nd argument.
5168 * @param a2 The name of the 3rd argument.
5169 */
5170# define IEM_CIMPL_CALL_3(a_fn, a0, a1, a2) a_fn(pVCpu, cbInstr, (a0), (a1), (a2))
5171
5172
5173/**
5174 * For typedef'ing or declaring a C instruction implementation function taking
5175 * four extra arguments.
5176 *
5177 * @param a_Name The name of the type.
5178 * @param a_Type0 The type of the 1st argument
5179 * @param a_Arg0 The name of the 1st argument.
5180 * @param a_Type1 The type of the 2nd argument.
5181 * @param a_Arg1 The name of the 2nd argument.
5182 * @param a_Type2 The type of the 3rd argument.
5183 * @param a_Arg2 The name of the 3rd argument.
5184 * @param a_Type3 The type of the 4th argument.
5185 * @param a_Arg3 The name of the 4th argument.
5186 */
5187# define IEM_CIMPL_DECL_TYPE_4(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1, a_Type2, a_Arg2, a_Type3, a_Arg3) \
5188 IEM_DECL_IMPL_TYPE(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1, a_Type2 a_Arg2, a_Type3 a_Arg3))
5189/**
5190 * For defining a C instruction implementation function taking four extra
5191 * arguments.
5192 *
5193 * @param a_Name The name of the function.
5194 * @param a_Type0 The type of the 1st argument
5195 * @param a_Arg0 The name of the 1st argument.
5196 * @param a_Type1 The type of the 2nd argument.
5197 * @param a_Arg1 The name of the 2nd argument.
5198 * @param a_Type2 The type of the 3rd argument.
5199 * @param a_Arg2 The name of the 3rd argument.
5200 * @param a_Type3 The type of the 4th argument.
5201 * @param a_Arg3 The name of the 4th argument.
5202 */
5203# define IEM_CIMPL_DEF_4(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1, a_Type2, a_Arg2, a_Type3, a_Arg3) \
5204 IEM_DECL_IMPL_DEF(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1, \
5205 a_Type2 a_Arg2, a_Type3 a_Arg3))
5206/**
5207 * Prototype version of IEM_CIMPL_DEF_4.
5208 */
5209# define IEM_CIMPL_PROTO_4(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1, a_Type2, a_Arg2, a_Type3, a_Arg3) \
5210 IEM_DECL_IMPL_PROTO(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1, \
5211 a_Type2 a_Arg2, a_Type3 a_Arg3))
5212/**
5213 * For calling a C instruction implementation function taking four extra
5214 * arguments.
5215 *
5216 * This special call macro adds default arguments to the call and allows us to
5217 * change these later.
5218 *
5219 * @param a_fn The name of the function.
5220 * @param a0 The name of the 1st argument.
5221 * @param a1 The name of the 2nd argument.
5222 * @param a2 The name of the 3rd argument.
5223 * @param a3 The name of the 4th argument.
5224 */
5225# define IEM_CIMPL_CALL_4(a_fn, a0, a1, a2, a3) a_fn(pVCpu, cbInstr, (a0), (a1), (a2), (a3))
5226
5227
5228/**
5229 * For typedef'ing or declaring a C instruction implementation function taking
5230 * five extra arguments.
5231 *
5232 * @param a_Name The name of the type.
5233 * @param a_Type0 The type of the 1st argument
5234 * @param a_Arg0 The name of the 1st argument.
5235 * @param a_Type1 The type of the 2nd argument.
5236 * @param a_Arg1 The name of the 2nd argument.
5237 * @param a_Type2 The type of the 3rd argument.
5238 * @param a_Arg2 The name of the 3rd argument.
5239 * @param a_Type3 The type of the 4th argument.
5240 * @param a_Arg3 The name of the 4th argument.
5241 * @param a_Type4 The type of the 5th argument.
5242 * @param a_Arg4 The name of the 5th argument.
5243 */
5244# define IEM_CIMPL_DECL_TYPE_5(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1, a_Type2, a_Arg2, a_Type3, a_Arg3, a_Type4, a_Arg4) \
5245 IEM_DECL_IMPL_TYPE(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, \
5246 a_Type0 a_Arg0, a_Type1 a_Arg1, a_Type2 a_Arg2, \
5247 a_Type3 a_Arg3, a_Type4 a_Arg4))
5248/**
5249 * For defining a C instruction implementation function taking five extra
5250 * arguments.
5251 *
5252 * @param a_Name The name of the function.
5253 * @param a_Type0 The type of the 1st argument
5254 * @param a_Arg0 The name of the 1st argument.
5255 * @param a_Type1 The type of the 2nd argument.
5256 * @param a_Arg1 The name of the 2nd argument.
5257 * @param a_Type2 The type of the 3rd argument.
5258 * @param a_Arg2 The name of the 3rd argument.
5259 * @param a_Type3 The type of the 4th argument.
5260 * @param a_Arg3 The name of the 4th argument.
5261 * @param a_Type4 The type of the 5th argument.
5262 * @param a_Arg4 The name of the 5th argument.
5263 */
5264# define IEM_CIMPL_DEF_5(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1, a_Type2, a_Arg2, a_Type3, a_Arg3, a_Type4, a_Arg4) \
5265 IEM_DECL_IMPL_DEF(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1, \
5266 a_Type2 a_Arg2, a_Type3 a_Arg3, a_Type4 a_Arg4))
5267/**
5268 * Prototype version of IEM_CIMPL_DEF_5.
5269 */
5270# define IEM_CIMPL_PROTO_5(a_Name, a_Type0, a_Arg0, a_Type1, a_Arg1, a_Type2, a_Arg2, a_Type3, a_Arg3, a_Type4, a_Arg4) \
5271 IEM_DECL_IMPL_PROTO(VBOXSTRICTRC, a_Name, (PVMCPUCC pVCpu, uint8_t cbInstr, a_Type0 a_Arg0, a_Type1 a_Arg1, \
5272 a_Type2 a_Arg2, a_Type3 a_Arg3, a_Type4 a_Arg4))
5273/**
5274 * For calling a C instruction implementation function taking five extra
5275 * arguments.
5276 *
5277 * This special call macro adds default arguments to the call and allows us to
5278 * change these later.
5279 *
5280 * @param a_fn The name of the function.
5281 * @param a0 The name of the 1st argument.
5282 * @param a1 The name of the 2nd argument.
5283 * @param a2 The name of the 3rd argument.
5284 * @param a3 The name of the 4th argument.
5285 * @param a4 The name of the 5th argument.
5286 */
5287# define IEM_CIMPL_CALL_5(a_fn, a0, a1, a2, a3, a4) a_fn(pVCpu, cbInstr, (a0), (a1), (a2), (a3), (a4))
5288
5289/** @} */
5290
5291
5292/** @name Opcode Decoder Function Types.
5293 * @{ */
5294
5295/** @typedef PFNIEMOP
5296 * Pointer to an opcode decoder function.
5297 */
5298
5299/** @def FNIEMOP_DEF
5300 * Define an opcode decoder function.
5301 *
5302 * We're using macros for this so that adding and removing parameters as well as
5303 * tweaking compiler-specific attributes becomes easier. See FNIEMOP_CALL.
5304 *
5305 * @param a_Name The function name.
5306 */
5307
5308/** @typedef PFNIEMOPRM
5309 * Pointer to an opcode decoder function with RM byte.
5310 */
5311
5312/** @def FNIEMOPRM_DEF
5313 * Define an opcode decoder function with RM byte.
5314 *
5315 * We're using macros for this so that adding and removing parameters as well as
5316 * tweaking compiler-specific attributes becomes easier. See FNIEMOP_CALL_1.
5317 *
5318 * @param a_Name The function name.
5319 */
5320
5321#if defined(__GNUC__) && defined(RT_ARCH_X86)
5322typedef VBOXSTRICTRC (__attribute__((__fastcall__)) * PFNIEMOP)(PVMCPUCC pVCpu);
5323typedef VBOXSTRICTRC (__attribute__((__fastcall__)) * PFNIEMOPRM)(PVMCPUCC pVCpu, uint8_t bRm);
5324# define FNIEMOP_DEF(a_Name) \
5325 IEM_STATIC VBOXSTRICTRC __attribute__((__fastcall__, __nothrow__)) a_Name(PVMCPUCC pVCpu)
5326# define FNIEMOP_DEF_1(a_Name, a_Type0, a_Name0) \
5327 IEM_STATIC VBOXSTRICTRC __attribute__((__fastcall__, __nothrow__)) a_Name(PVMCPUCC pVCpu, a_Type0 a_Name0)
5328# define FNIEMOP_DEF_2(a_Name, a_Type0, a_Name0, a_Type1, a_Name1) \
5329 IEM_STATIC VBOXSTRICTRC __attribute__((__fastcall__, __nothrow__)) a_Name(PVMCPUCC pVCpu, a_Type0 a_Name0, a_Type1 a_Name1)
5330
5331#elif defined(_MSC_VER) && defined(RT_ARCH_X86)
5332typedef VBOXSTRICTRC (__fastcall * PFNIEMOP)(PVMCPUCC pVCpu);
5333typedef VBOXSTRICTRC (__fastcall * PFNIEMOPRM)(PVMCPUCC pVCpu, uint8_t bRm);
5334# define FNIEMOP_DEF(a_Name) \
5335 IEM_STATIC /*__declspec(naked)*/ VBOXSTRICTRC __fastcall a_Name(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP
5336# define FNIEMOP_DEF_1(a_Name, a_Type0, a_Name0) \
5337 IEM_STATIC /*__declspec(naked)*/ VBOXSTRICTRC __fastcall a_Name(PVMCPUCC pVCpu, a_Type0 a_Name0) IEM_NOEXCEPT_MAY_LONGJMP
5338# define FNIEMOP_DEF_2(a_Name, a_Type0, a_Name0, a_Type1, a_Name1) \
5339 IEM_STATIC /*__declspec(naked)*/ VBOXSTRICTRC __fastcall a_Name(PVMCPUCC pVCpu, a_Type0 a_Name0, a_Type1 a_Name1) IEM_NOEXCEPT_MAY_LONGJMP
5340
5341#elif defined(__GNUC__) && !defined(IEM_WITH_THROW_CATCH)
5342typedef VBOXSTRICTRC (* PFNIEMOP)(PVMCPUCC pVCpu);
5343typedef VBOXSTRICTRC (* PFNIEMOPRM)(PVMCPUCC pVCpu, uint8_t bRm);
5344# define FNIEMOP_DEF(a_Name) \
5345 IEM_STATIC VBOXSTRICTRC __attribute__((__nothrow__)) a_Name(PVMCPUCC pVCpu)
5346# define FNIEMOP_DEF_1(a_Name, a_Type0, a_Name0) \
5347 IEM_STATIC VBOXSTRICTRC __attribute__((__nothrow__)) a_Name(PVMCPUCC pVCpu, a_Type0 a_Name0)
5348# define FNIEMOP_DEF_2(a_Name, a_Type0, a_Name0, a_Type1, a_Name1) \
5349 IEM_STATIC VBOXSTRICTRC __attribute__((__nothrow__)) a_Name(PVMCPUCC pVCpu, a_Type0 a_Name0, a_Type1 a_Name1)
5350
5351#else
5352typedef VBOXSTRICTRC (* PFNIEMOP)(PVMCPUCC pVCpu);
5353typedef VBOXSTRICTRC (* PFNIEMOPRM)(PVMCPUCC pVCpu, uint8_t bRm);
5354# define FNIEMOP_DEF(a_Name) \
5355 IEM_STATIC VBOXSTRICTRC a_Name(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP
5356# define FNIEMOP_DEF_1(a_Name, a_Type0, a_Name0) \
5357 IEM_STATIC VBOXSTRICTRC a_Name(PVMCPUCC pVCpu, a_Type0 a_Name0) IEM_NOEXCEPT_MAY_LONGJMP
5358# define FNIEMOP_DEF_2(a_Name, a_Type0, a_Name0, a_Type1, a_Name1) \
5359 IEM_STATIC VBOXSTRICTRC a_Name(PVMCPUCC pVCpu, a_Type0 a_Name0, a_Type1 a_Name1) IEM_NOEXCEPT_MAY_LONGJMP
5360
5361#endif
5362#define FNIEMOPRM_DEF(a_Name) FNIEMOP_DEF_1(a_Name, uint8_t, bRm)
5363
5364/**
5365 * Call an opcode decoder function.
5366 *
5367 * We're using macros for this so that adding and removing parameters can be
5368 * done as we please. See FNIEMOP_DEF.
5369 */
5370#define FNIEMOP_CALL(a_pfn) (a_pfn)(pVCpu)
5371
5372/**
5373 * Call a common opcode decoder function taking one extra argument.
5374 *
5375 * We're using macros for this so that adding and removing parameters can be
5376 * done as we please. See FNIEMOP_DEF_1.
5377 */
5378#define FNIEMOP_CALL_1(a_pfn, a0) (a_pfn)(pVCpu, a0)
5379
5380/**
5381 * Call a common opcode decoder function taking two extra arguments.
5382 *
5383 * We're using macros for this so that adding and removing parameters can be
5384 * done as we please. See FNIEMOP_DEF_2.
5385 */
5386#define FNIEMOP_CALL_2(a_pfn, a0, a1) (a_pfn)(pVCpu, a0, a1)
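/* Example (illustrative sketch only; iemOp_Example is a made-up name): an
   opcode decoder function defined with FNIEMOP_DEF and dispatched with
   FNIEMOP_CALL. */
#if 0
FNIEMOP_DEF(iemOp_Example)
{
    RT_NOREF(pVCpu);
    return VINF_SUCCESS;
}
/* ... in the dispatcher, with pVCpu in scope: return FNIEMOP_CALL(iemOp_Example); */
#endif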
5387/** @} */
5388
5389
5390/** @name Misc Helpers
5391 * @{ */
5392
5393/** Used to shut up GCC warnings about variables that 'may be used uninitialized'
5394 * due to GCC lacking knowledge about the value range of a switch. */
5395#if RT_CPLUSPLUS_PREREQ(202000)
5396# define IEM_NOT_REACHED_DEFAULT_CASE_RET() default: [[unlikely]] AssertFailedReturn(VERR_IPE_NOT_REACHED_DEFAULT_CASE)
5397#else
5398# define IEM_NOT_REACHED_DEFAULT_CASE_RET() default: AssertFailedReturn(VERR_IPE_NOT_REACHED_DEFAULT_CASE)
5399#endif
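/* Example (illustrative sketch only; the doSomethingNN helpers are made up):
   typical use in an operand-size switch, where the default case both asserts
   and returns VERR_IPE_NOT_REACHED_DEFAULT_CASE. */
#if 0
switch (IEM_GET_CPU_MODE(pVCpu))
{
    case IEMMODE_16BIT: return doSomething16(pVCpu);
    case IEMMODE_32BIT: return doSomething32(pVCpu);
    case IEMMODE_64BIT: return doSomething64(pVCpu);
    IEM_NOT_REACHED_DEFAULT_CASE_RET();
}
#endif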
5400
5401/** Variant of IEM_NOT_REACHED_DEFAULT_CASE_RET that returns a custom value. */
5402#if RT_CPLUSPLUS_PREREQ(202000)
5403# define IEM_NOT_REACHED_DEFAULT_CASE_RET2(a_RetValue) default: [[unlikely]] AssertFailedReturn(a_RetValue)
5404#else
5405# define IEM_NOT_REACHED_DEFAULT_CASE_RET2(a_RetValue) default: AssertFailedReturn(a_RetValue)
5406#endif
5407
5408/**
5409 * Returns VERR_IEM_ASPECT_NOT_IMPLEMENTED, and in debug builds logs the
5410 * occurrence.
5411 */
5412#ifdef LOG_ENABLED
5413# define IEM_RETURN_ASPECT_NOT_IMPLEMENTED() \
5414 do { \
5415 /*Log*/ LogAlways(("%s: returning IEM_RETURN_ASPECT_NOT_IMPLEMENTED (line %d)\n", __FUNCTION__, __LINE__)); \
5416 return VERR_IEM_ASPECT_NOT_IMPLEMENTED; \
5417 } while (0)
5418#else
5419# define IEM_RETURN_ASPECT_NOT_IMPLEMENTED() \
5420 return VERR_IEM_ASPECT_NOT_IMPLEMENTED
5421#endif
5422
5423/**
5424 * Returns VERR_IEM_ASPECT_NOT_IMPLEMENTED, and in debug builds logs the
5425 * occurrence using the supplied logger statement.
5426 *
5427 * @param a_LoggerArgs What to log on failure.
5428 */
5429#ifdef LOG_ENABLED
5430# define IEM_RETURN_ASPECT_NOT_IMPLEMENTED_LOG(a_LoggerArgs) \
5431 do { \
5432 LogAlways((LOG_FN_FMT ": ", __PRETTY_FUNCTION__)); LogAlways(a_LoggerArgs); \
5433 /*LogFunc(a_LoggerArgs);*/ \
5434 return VERR_IEM_ASPECT_NOT_IMPLEMENTED; \
5435 } while (0)
5436#else
5437# define IEM_RETURN_ASPECT_NOT_IMPLEMENTED_LOG(a_LoggerArgs) \
5438 return VERR_IEM_ASPECT_NOT_IMPLEMENTED
5439#endif
5440
5441/**
5442 * Gets the CPU mode (from fExec) as an IEMMODE value.
5443 *
5444 * @returns IEMMODE
5445 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5446 */
5447#define IEM_GET_CPU_MODE(a_pVCpu) ((a_pVCpu)->iem.s.fExec & IEM_F_MODE_CPUMODE_MASK)
5448
5449/**
5450 * Check if we're currently executing in real or virtual 8086 mode.
5451 *
5452 * @returns @c true if it is, @c false if not.
5453 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5454 */
5455#define IEM_IS_REAL_OR_V86_MODE(a_pVCpu) (( ((a_pVCpu)->iem.s.fExec ^ IEM_F_MODE_X86_PROT_MASK) \
5456 & (IEM_F_MODE_X86_V86_MASK | IEM_F_MODE_X86_PROT_MASK)) != 0)
5457
5458/**
5459 * Check if we're currently executing in virtual 8086 mode.
5460 *
5461 * @returns @c true if it is, @c false if not.
5462 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5463 */
5464#define IEM_IS_V86_MODE(a_pVCpu) (((a_pVCpu)->iem.s.fExec & IEM_F_MODE_X86_V86_MASK) != 0)
5465
5466/**
5467 * Check if we're currently executing in long mode.
5468 *
5469 * @returns @c true if it is, @c false if not.
5470 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5471 */
5472#define IEM_IS_LONG_MODE(a_pVCpu) (CPUMIsGuestInLongModeEx(IEM_GET_CTX(a_pVCpu)))
5473
5474/**
5475 * Check if we're currently executing in a 16-bit code segment.
5476 *
5477 * @returns @c true if it is, @c false if not.
5478 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5479 */
5480#define IEM_IS_16BIT_CODE(a_pVCpu) (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_16BIT)
5481
5482/**
5483 * Check if we're currently executing in a 32-bit code segment.
5484 *
5485 * @returns @c true if it is, @c false if not.
5486 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5487 */
5488#define IEM_IS_32BIT_CODE(a_pVCpu) (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_32BIT)
5489
5490/**
5491 * Check if we're currently executing in a 64-bit code segment.
5492 *
5493 * @returns @c true if it is, @c false if not.
5494 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5495 */
5496#define IEM_IS_64BIT_CODE(a_pVCpu) (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_64BIT)
5497
5498/**
5499 * Check if we're currently executing in real mode.
5500 *
5501 * @returns @c true if it is, @c false if not.
5502 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5503 */
5504#define IEM_IS_REAL_MODE(a_pVCpu) (!((a_pVCpu)->iem.s.fExec & IEM_F_MODE_X86_PROT_MASK))
5505
5506/**
5507 * Gets the current protection level (CPL).
5508 *
5509 * @returns 0..3
5510 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5511 */
5512#define IEM_GET_CPL(a_pVCpu) (((a_pVCpu)->iem.s.fExec >> IEM_F_X86_CPL_SHIFT) & IEM_F_X86_CPL_SMASK)
5513
5514/**
5515 * Sets the current protection level (CPL).
5516 *
5517 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5518 */
5519#define IEM_SET_CPL(a_pVCpu, a_uCpl) \
5520 do { (a_pVCpu)->iem.s.fExec = ((a_pVCpu)->iem.s.fExec & ~IEM_F_X86_CPL_MASK) | ((a_uCpl) << IEM_F_X86_CPL_SHIFT); } while (0)
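/* E.g. after IEM_SET_CPL(a_pVCpu, 3), IEM_GET_CPL(a_pVCpu) yields 3; the CPL
   lives in the IEM_F_X86_CPL_MASK bits of fExec. */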
5521
5522/**
5523 * Returns a (const) pointer to the CPUMFEATURES for the guest CPU.
5524 * @returns PCCPUMFEATURES
5525 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5526 */
5527#define IEM_GET_GUEST_CPU_FEATURES(a_pVCpu) (&((a_pVCpu)->CTX_SUFF(pVM)->cpum.ro.GuestFeatures))
5528
5529/**
5530 * Returns a (const) pointer to the CPUMFEATURES for the host CPU.
5531 * @returns PCCPUMFEATURES
5532 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5533 */
5534#define IEM_GET_HOST_CPU_FEATURES(a_pVCpu) (&g_CpumHostFeatures.s)
5535
5536/**
5537 * Evaluates to true if we're presenting an Intel CPU to the guest.
5538 */
5539#define IEM_IS_GUEST_CPU_INTEL(a_pVCpu) ( (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL )
5540
5541/**
5542 * Evaluates to true if we're presenting an AMD CPU to the guest.
5543 */
5544#define IEM_IS_GUEST_CPU_AMD(a_pVCpu) ( (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_AMD || (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_HYGON )
5545
5546/**
5547 * Check if the address is canonical.
5548 */
5549#define IEM_IS_CANONICAL(a_u64Addr) X86_IS_CANONICAL(a_u64Addr)
5550
5551/** Checks if the ModR/M byte is in register mode or not. */
5552#define IEM_IS_MODRM_REG_MODE(a_bRm) ( ((a_bRm) & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
5553/** Checks if the ModR/M byte is in memory mode or not. */
5554#define IEM_IS_MODRM_MEM_MODE(a_bRm) ( ((a_bRm) & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT) )
5555
5556/**
5557 * Gets the register (reg) part of a ModR/M encoding, with REX.R added in.
5558 *
5559 * For use during decoding.
5560 */
5561#define IEM_GET_MODRM_REG(a_pVCpu, a_bRm) ( (((a_bRm) >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | (a_pVCpu)->iem.s.uRexReg )
5562/**
5563 * Gets the r/m part of a ModR/M encoding as a register index, with REX.B added in.
5564 *
5565 * For use during decoding.
5566 */
5567#define IEM_GET_MODRM_RM(a_pVCpu, a_bRm) ( ((a_bRm) & X86_MODRM_RM_MASK) | (a_pVCpu)->iem.s.uRexB )
5568
5569/**
5570 * Gets the register (reg) part of a ModR/M encoding, without REX.R.
5571 *
5572 * For use during decoding.
5573 */
5574#define IEM_GET_MODRM_REG_8(a_bRm) ( (((a_bRm) >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) )
5575/**
5576 * Gets the r/m part of a ModR/M encoding as a register index, without REX.B.
5577 *
5578 * For use during decoding.
5579 */
5580#define IEM_GET_MODRM_RM_8(a_bRm) ( ((a_bRm) & X86_MODRM_RM_MASK) )
5581
5582/**
5583 * Gets the register (reg) part of a ModR/M encoding as an extended 8-bit
5584 * register index, with REX.R added in.
5585 *
5586 * For use during decoding.
5587 *
5588 * @see iemGRegRefU8Ex, iemGRegFetchU8Ex, iemGRegStoreU8Ex
5589 */
5590#define IEM_GET_MODRM_REG_EX8(a_pVCpu, a_bRm) \
5591 ( (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX) \
5592 || !((a_bRm) & (4 << X86_MODRM_REG_SHIFT)) /* IEM_GET_MODRM_REG(pVCpu, a_bRm) < 4 */ \
5593 ? IEM_GET_MODRM_REG(pVCpu, a_bRm) : (((a_bRm) >> X86_MODRM_REG_SHIFT) & 3) | 16)
5594/**
5595 * Gets the r/m part of a ModR/M encoding as an extended 8-bit register index,
5596 * with REX.B added in.
5597 *
5598 * For use during decoding.
5599 *
5600 * @see iemGRegRefU8Ex, iemGRegFetchU8Ex, iemGRegStoreU8Ex
5601 */
5602#define IEM_GET_MODRM_RM_EX8(a_pVCpu, a_bRm) \
5603 ( (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX) \
5604 || !((a_bRm) & 4) /* IEM_GET_MODRM_RM(pVCpu, a_bRm) < 4 */ \
5605 ? IEM_GET_MODRM_RM(pVCpu, a_bRm) : ((a_bRm) & 3) | 16)
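/* Worked example: for bRm = 0xC3 (mod=3, reg=0, r/m=3) without a REX prefix,
   IEM_IS_MODRM_REG_MODE(bRm) is true, IEM_GET_MODRM_REG() yields 0 (rAX) and
   IEM_GET_MODRM_RM() yields 3 (rBX); with REX.B set the latter becomes 11 (r11). */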
5606
5607/**
5608 * Combines the REX prefix and ModR/M byte for passing to
5609 * iemOpHlpCalcRmEffAddrThreadedAddr64().
5610 *
5611 * @returns The ModRM byte but with bit 3 set to REX.B and bit 4 to REX.X.
5612 * The two bits are part of the REG sub-field, which isn't needed in
5613 * iemOpHlpCalcRmEffAddrThreadedAddr64().
5614 *
5615 * For use during decoding/recompiling.
5616 */
5617#define IEM_GET_MODRM_EX(a_pVCpu, a_bRm) \
5618 ( ((a_bRm) & ~X86_MODRM_REG_MASK) \
5619 | (uint8_t)( (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X)) >> (25 - 3) ) )
5620AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25));
5621AssertCompile(IEM_OP_PRF_REX_X == RT_BIT_32(26));
5622
5623/**
5624 * Gets the effective VEX.VVVV value.
5625 *
5626 * The 4th bit is ignored when not in 64-bit code.
5627 * @returns effective V-register value.
5628 * @param a_pVCpu The cross context virtual CPU structure of the calling thread.
5629 */
5630#define IEM_GET_EFFECTIVE_VVVV(a_pVCpu) \
5631 (IEM_IS_64BIT_CODE(a_pVCpu) ? (a_pVCpu)->iem.s.uVex3rdReg : (a_pVCpu)->iem.s.uVex3rdReg & 7)
5632
5633
5634/**
5635 * Gets the register (reg) part of the special 4th register byte used by
5636 * vblendvps and vblendvpd.
5637 *
5638 * For use during decoding.
5639 */
5640#define IEM_GET_IMM8_REG(a_pVCpu, a_bRegImm8) \
5641 (IEM_IS_64BIT_CODE(a_pVCpu) ? (a_bRegImm8) >> 4 : ((a_bRegImm8) >> 4) & 7)
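/* E.g. a_bRegImm8 = 0x90 selects register 9 in 64-bit code but register 1
   (9 & 7) elsewhere, since the top bit of the 4-bit index is only valid in
   64-bit mode. */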
5642

/**
 * Checks if we're executing inside an AMD-V or VT-x guest.
 */
#if defined(VBOX_WITH_NESTED_HWVIRT_VMX) || defined(VBOX_WITH_NESTED_HWVIRT_SVM)
# define IEM_IS_IN_GUEST(a_pVCpu) RT_BOOL((a_pVCpu)->iem.s.fExec & IEM_F_X86_CTX_IN_GUEST)
#else
# define IEM_IS_IN_GUEST(a_pVCpu) false
#endif


#ifdef VBOX_WITH_NESTED_HWVIRT_VMX

/**
 * Check if the guest has entered VMX root operation.
 */
# define IEM_VMX_IS_ROOT_MODE(a_pVCpu) (CPUMIsGuestInVmxRootMode(IEM_GET_CTX(a_pVCpu)))

/**
 * Check if the guest has entered VMX non-root operation.
 */
# define IEM_VMX_IS_NON_ROOT_MODE(a_pVCpu) ( ((a_pVCpu)->iem.s.fExec & (IEM_F_X86_CTX_VMX | IEM_F_X86_CTX_IN_GUEST)) \
                                             == (IEM_F_X86_CTX_VMX | IEM_F_X86_CTX_IN_GUEST) )

/**
 * Check if the nested-guest has the given Pin-based VM-execution control set.
 */
# define IEM_VMX_IS_PINCTLS_SET(a_pVCpu, a_PinCtl) (CPUMIsGuestVmxPinCtlsSet(IEM_GET_CTX(a_pVCpu), (a_PinCtl)))

/**
 * Check if the nested-guest has the given Processor-based VM-execution control set.
 */
# define IEM_VMX_IS_PROCCTLS_SET(a_pVCpu, a_ProcCtl) (CPUMIsGuestVmxProcCtlsSet(IEM_GET_CTX(a_pVCpu), (a_ProcCtl)))

/**
 * Check if the nested-guest has the given Secondary Processor-based VM-execution
 * control set.
 */
# define IEM_VMX_IS_PROCCTLS2_SET(a_pVCpu, a_ProcCtl2) (CPUMIsGuestVmxProcCtls2Set(IEM_GET_CTX(a_pVCpu), (a_ProcCtl2)))

/** Gets the guest-physical address of the shadow VMCS for the given VCPU. */
# define IEM_VMX_GET_SHADOW_VMCS(a_pVCpu) ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysShadowVmcs)

/** Whether a shadow VMCS is present for the given VCPU. */
# define IEM_VMX_HAS_SHADOW_VMCS(a_pVCpu) RT_BOOL(IEM_VMX_GET_SHADOW_VMCS(a_pVCpu) != NIL_RTGCPHYS)

/** Gets the VMXON region pointer. */
# define IEM_VMX_GET_VMXON_PTR(a_pVCpu) ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmxon)

/** Gets the guest-physical address of the current VMCS for the given VCPU. */
# define IEM_VMX_GET_CURRENT_VMCS(a_pVCpu) ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs)

/** Whether a current VMCS is present for the given VCPU. */
# define IEM_VMX_HAS_CURRENT_VMCS(a_pVCpu) RT_BOOL(IEM_VMX_GET_CURRENT_VMCS(a_pVCpu) != NIL_RTGCPHYS)

/** Assigns the guest-physical address of the current VMCS for the given VCPU. */
# define IEM_VMX_SET_CURRENT_VMCS(a_pVCpu, a_GCPhysVmcs) \
    do \
    { \
        Assert((a_GCPhysVmcs) != NIL_RTGCPHYS); \
        (a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs = (a_GCPhysVmcs); \
    } while (0)

/** Clears any current VMCS for the given VCPU. */
# define IEM_VMX_CLEAR_CURRENT_VMCS(a_pVCpu) \
    do \
    { \
        (a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs = NIL_RTGCPHYS; \
    } while (0)
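
/* The do { ... } while (0) wrapper on the two macros above is the standard
   idiom for making a multi-statement macro behave like a single statement.
   A minimal sketch of why it matters (hypothetical SET_TWO macro, not part
   of the build): */
#if 0
#define SET_TWO(a, b) do { (a) = 1; (b) = 2; } while (0)
void example(int f, int *pa, int *pb)
{
    if (f)                  /* expands safely as one statement, even without braces */
        SET_TWO(*pa, *pb);
    else
        *pa = 0;            /* the dangling else still binds correctly */
}
#endif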

/**
 * Invokes the VMX VM-exit handler for an instruction intercept.
 */
# define IEM_VMX_VMEXIT_INSTR_RET(a_pVCpu, a_uExitReason, a_cbInstr) \
    do { return iemVmxVmexitInstr((a_pVCpu), (a_uExitReason), (a_cbInstr)); } while (0)

/**
 * Invokes the VMX VM-exit handler for an instruction intercept where the
 * instruction provides additional VM-exit information.
 */
# define IEM_VMX_VMEXIT_INSTR_NEEDS_INFO_RET(a_pVCpu, a_uExitReason, a_uInstrId, a_cbInstr) \
    do { return iemVmxVmexitInstrNeedsInfo((a_pVCpu), (a_uExitReason), (a_uInstrId), (a_cbInstr)); } while (0)

/**
 * Invokes the VMX VM-exit handler for a task switch.
 */
# define IEM_VMX_VMEXIT_TASK_SWITCH_RET(a_pVCpu, a_enmTaskSwitch, a_SelNewTss, a_cbInstr) \
    do { return iemVmxVmexitTaskSwitch((a_pVCpu), (a_enmTaskSwitch), (a_SelNewTss), (a_cbInstr)); } while (0)

/**
 * Invokes the VMX VM-exit handler for MWAIT.
 */
# define IEM_VMX_VMEXIT_MWAIT_RET(a_pVCpu, a_fMonitorArmed, a_cbInstr) \
    do { return iemVmxVmexitInstrMwait((a_pVCpu), (a_fMonitorArmed), (a_cbInstr)); } while (0)

/**
 * Invokes the VMX VM-exit handler for EPT faults.
 */
# define IEM_VMX_VMEXIT_EPT_RET(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr) \
    do { return iemVmxVmexitEpt(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr); } while (0)

/**
 * Invokes the VMX VM-exit handler for a triple fault.
 */
# define IEM_VMX_VMEXIT_TRIPLE_FAULT_RET(a_pVCpu, a_uExitReason, a_uExitQual) \
    do { return iemVmxVmexit((a_pVCpu), (a_uExitReason), (a_uExitQual)); } while (0)

#else
# define IEM_VMX_IS_ROOT_MODE(a_pVCpu) (false)
# define IEM_VMX_IS_NON_ROOT_MODE(a_pVCpu) (false)
# define IEM_VMX_IS_PINCTLS_SET(a_pVCpu, a_PinCtl) (false)
# define IEM_VMX_IS_PROCCTLS_SET(a_pVCpu, a_ProcCtl) (false)
# define IEM_VMX_IS_PROCCTLS2_SET(a_pVCpu, a_ProcCtl2) (false)
# define IEM_VMX_VMEXIT_INSTR_RET(a_pVCpu, a_uExitReason, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
# define IEM_VMX_VMEXIT_INSTR_NEEDS_INFO_RET(a_pVCpu, a_uExitReason, a_uInstrId, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
# define IEM_VMX_VMEXIT_TASK_SWITCH_RET(a_pVCpu, a_enmTaskSwitch, a_SelNewTss, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
# define IEM_VMX_VMEXIT_MWAIT_RET(a_pVCpu, a_fMonitorArmed, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
# define IEM_VMX_VMEXIT_EPT_RET(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr) do { return VERR_VMX_IPE_1; } while (0)
# define IEM_VMX_VMEXIT_TRIPLE_FAULT_RET(a_pVCpu, a_uExitReason, a_uExitQual) do { return VERR_VMX_IPE_1; } while (0)

#endif

#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
/**
 * Checks if we're executing a guest using AMD-V.
 */
# define IEM_SVM_IS_IN_GUEST(a_pVCpu) ( (a_pVCpu->iem.s.fExec & (IEM_F_X86_CTX_SVM | IEM_F_X86_CTX_IN_GUEST)) \
                                        == (IEM_F_X86_CTX_SVM | IEM_F_X86_CTX_IN_GUEST))
/**
 * Check if an SVM control/instruction intercept is set.
 */
# define IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept) \
    (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmCtrlInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_Intercept)))

/**
 * Check if an SVM read CRx intercept is set.
 */
# define IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, a_uCr) \
    (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmReadCRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uCr)))

/**
 * Check if an SVM write CRx intercept is set.
 */
# define IEM_SVM_IS_WRITE_CR_INTERCEPT_SET(a_pVCpu, a_uCr) \
    (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmWriteCRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uCr)))

/**
 * Check if an SVM read DRx intercept is set.
 */
# define IEM_SVM_IS_READ_DR_INTERCEPT_SET(a_pVCpu, a_uDr) \
    (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmReadDRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uDr)))

/**
 * Check if an SVM write DRx intercept is set.
 */
# define IEM_SVM_IS_WRITE_DR_INTERCEPT_SET(a_pVCpu, a_uDr) \
    (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmWriteDRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uDr)))

/**
 * Check if an SVM exception intercept is set.
 */
# define IEM_SVM_IS_XCPT_INTERCEPT_SET(a_pVCpu, a_uVector) \
    (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmXcptInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uVector)))

/**
 * Invokes the SVM \#VMEXIT handler for the nested-guest.
 */
# define IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2) \
    do { return iemSvmVmexit((a_pVCpu), (a_uExitCode), (a_uExitInfo1), (a_uExitInfo2)); } while (0)

/**
 * Invokes the 'MOV CRx' SVM \#VMEXIT handler after constructing the
 * corresponding decode assist information.
 */
# define IEM_SVM_CRX_VMEXIT_RET(a_pVCpu, a_uExitCode, a_enmAccessCrX, a_iGReg) \
    do \
    { \
        uint64_t uExitInfo1; \
        if (   IEM_GET_GUEST_CPU_FEATURES(a_pVCpu)->fSvmDecodeAssists \
            && (a_enmAccessCrX) == IEMACCESSCRX_MOV_CRX) \
            uExitInfo1 = SVM_EXIT1_MOV_CRX_MASK | ((a_iGReg) & 7); \
        else \
            uExitInfo1 = 0; \
        IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, uExitInfo1, 0); \
    } while (0)

/** Checks for and handles an SVM nested-guest instruction intercept, updating
 * NRIP if needed.
 */
# define IEM_SVM_CHECK_INSTR_INTERCEPT(a_pVCpu, a_Intercept, a_uExitCode, a_uExitInfo1, a_uExitInfo2, a_cbInstr) \
    do \
    { \
        if (IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept)) \
        { \
            IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr); \
            IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2); \
        } \
    } while (0)
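
/* A sketch of the typical call shape inside an instruction implementation.
   The intercept/exit-code constants are the usual names from VBox's SVM
   headers; the exact surrounding code varies per instruction, so treat this
   as an assumption-laden illustration, not the canonical usage.  Not built. */
#if 0
    /* Early in e.g. a RDTSC implementation: take the #VMEXIT before any side effects. */
    IEM_SVM_CHECK_INSTR_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_RDTSC, SVM_EXIT_RDTSC,
                                  0 /*uExitInfo1*/, 0 /*uExitInfo2*/, cbInstr);
    /* Not intercepted: emulate the instruction normally from here on. */
#endif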

/** Checks and handles SVM nested-guest CR0 read intercept. */
# define IEM_SVM_CHECK_READ_CR0_INTERCEPT(a_pVCpu, a_uExitInfo1, a_uExitInfo2, a_cbInstr) \
    do \
    { \
        if (!IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, 0)) \
        { /* probably likely */ } \
        else \
        { \
            IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr); \
            IEM_SVM_VMEXIT_RET(a_pVCpu, SVM_EXIT_READ_CR0, a_uExitInfo1, a_uExitInfo2); \
        } \
    } while (0)

/**
 * Updates the NextRIP (NRIP) field in the nested-guest VMCB.
 */
# define IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr) \
    do { \
        if (IEM_GET_GUEST_CPU_FEATURES(a_pVCpu)->fSvmNextRipSave) \
            CPUMGuestSvmUpdateNRip(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_cbInstr)); \
    } while (0)

#else
# define IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept) (false)
# define IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, a_uCr) (false)
# define IEM_SVM_IS_WRITE_CR_INTERCEPT_SET(a_pVCpu, a_uCr) (false)
# define IEM_SVM_IS_READ_DR_INTERCEPT_SET(a_pVCpu, a_uDr) (false)
# define IEM_SVM_IS_WRITE_DR_INTERCEPT_SET(a_pVCpu, a_uDr) (false)
# define IEM_SVM_IS_XCPT_INTERCEPT_SET(a_pVCpu, a_uVector) (false)
# define IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2) do { return VERR_SVM_IPE_1; } while (0)
# define IEM_SVM_CRX_VMEXIT_RET(a_pVCpu, a_uExitCode, a_enmAccessCrX, a_iGReg) do { return VERR_SVM_IPE_1; } while (0)
# define IEM_SVM_CHECK_INSTR_INTERCEPT(a_pVCpu, a_Intercept, a_uExitCode, \
                                       a_uExitInfo1, a_uExitInfo2, a_cbInstr) do { } while (0)
# define IEM_SVM_CHECK_READ_CR0_INTERCEPT(a_pVCpu, a_uExitInfo1, a_uExitInfo2, a_cbInstr) do { } while (0)
# define IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr) do { } while (0)

#endif

/** @} */

uint32_t iemCalcExecDbgFlagsSlow(PVMCPUCC pVCpu);
VBOXSTRICTRC iemExecInjectPendingTrap(PVMCPUCC pVCpu);


/**
 * Selector descriptor table entry as fetched by iemMemFetchSelDesc.
 */
typedef union IEMSELDESC
{
    /** The legacy view. */
    X86DESC Legacy;
    /** The long mode view. */
    X86DESC64 Long;
} IEMSELDESC;
/** Pointer to a selector descriptor table entry. */
typedef IEMSELDESC *PIEMSELDESC;
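
/* A sketch of how the union is typically consumed: fetch a descriptor and
   read the view matching the current mode.  Error handling is elided and the
   bit-field names follow VBox's X86DESC/X86DESC64 layouts; treat the details
   as illustrative.  Not part of the build. */
#if 0
    IEMSELDESC Desc;
    VBOXSTRICTRC rcStrict = iemMemFetchSelDesc(pVCpu, &Desc, uSel, X86_XCPT_GP);
    if (rcStrict == VINF_SUCCESS)
    {
        if (!Desc.Legacy.Gen.u1Present)     /* the low dword layout is shared by both views */
            return iemRaiseSelectorNotPresentBySelector(pVCpu, uSel);
        /* Long-mode system descriptors additionally expose Desc.Long.Gen.u32BaseHigh3. */
    }
#endif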

/** @name Raising Exceptions.
 * @{ */
VBOXSTRICTRC iemTaskSwitch(PVMCPUCC pVCpu, IEMTASKSWITCH enmTaskSwitch, uint32_t uNextEip, uint32_t fFlags,
                           uint16_t uErr, uint64_t uCr2, RTSEL SelTSS, PIEMSELDESC pNewDescTSS) RT_NOEXCEPT;

VBOXSTRICTRC iemRaiseXcptOrInt(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector, uint32_t fFlags,
                               uint16_t uErr, uint64_t uCr2) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseXcptOrIntJmp(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector,
                                          uint32_t fFlags, uint16_t uErr, uint64_t uCr2) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaiseDivideError(PVMCPUCC pVCpu) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseDivideErrorJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaiseDebugException(PVMCPUCC pVCpu) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseBoundRangeExceeded(PVMCPUCC pVCpu) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseUndefinedOpcode(PVMCPUCC pVCpu) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseUndefinedOpcodeJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaiseDeviceNotAvailable(PVMCPUCC pVCpu) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseDeviceNotAvailableJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaiseTaskSwitchFaultWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseTaskSwitchFaultCurrentTSS(PVMCPUCC pVCpu) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseTaskSwitchFault0(PVMCPUCC pVCpu) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseTaskSwitchFaultBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
/*VBOXSTRICTRC iemRaiseSelectorNotPresent(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;*/
VBOXSTRICTRC iemRaiseSelectorNotPresentWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseSelectorNotPresentBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseStackSelectorNotPresentBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseStackSelectorNotPresentWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseGeneralProtectionFault(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseGeneralProtectionFault0(PVMCPUCC pVCpu) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseGeneralProtectionFault0Jmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaiseGeneralProtectionFaultBySelector(PVMCPUCC pVCpu, RTSEL Sel) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseNotCanonical(PVMCPUCC pVCpu) RT_NOEXCEPT;
VBOXSTRICTRC iemRaiseSelectorBounds(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseSelectorBoundsJmp(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaiseSelectorBoundsBySelector(PVMCPUCC pVCpu, RTSEL Sel) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseSelectorBoundsBySelectorJmp(PVMCPUCC pVCpu, RTSEL Sel) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaiseSelectorInvalidAccess(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseSelectorInvalidAccessJmp(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaisePageFault(PVMCPUCC pVCpu, RTGCPTR GCPtrWhere, uint32_t cbAccess, uint32_t fAccess, int rc) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaisePageFaultJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrWhere, uint32_t cbAccess, uint32_t fAccess, int rc) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaiseMathFault(PVMCPUCC pVCpu) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseMathFaultJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaiseAlignmentCheckException(PVMCPUCC pVCpu) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseAlignmentCheckExceptionJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
VBOXSTRICTRC iemRaiseSimdFpException(PVMCPUCC pVCpu) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
DECL_NO_RETURN(void) iemRaiseSimdFpExceptionJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
#endif

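/* The paired prototypes above serve the two error-propagation styles IEM
   supports.  A sketch contrasting them (illustrative condition; not built): */
#if 0
    /* Status-code style: the caller must check and thread the return value. */
    if (uSel & X86_SEL_RPL)
        return iemRaiseGeneralProtectionFault0(pVCpu);

    /* setjmp/longjmp style (IEM_WITH_SETJMP): nothing to thread through, the
       *Jmp variant never returns and unwinds straight to the dispatcher. */
    if (uSel & X86_SEL_RPL)
        iemRaiseGeneralProtectionFault0Jmp(pVCpu);
#endif
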
void iemLogSyscallRealModeInt(PVMCPUCC pVCpu, uint8_t u8Vector, uint8_t cbInstr);
void iemLogSyscallProtModeInt(PVMCPUCC pVCpu, uint8_t u8Vector, uint8_t cbInstr);

IEM_CIMPL_DEF_0(iemCImplRaiseDivideError);
IEM_CIMPL_DEF_0(iemCImplRaiseInvalidLockPrefix);
IEM_CIMPL_DEF_0(iemCImplRaiseInvalidOpcode);

/**
 * Macro for calling iemCImplRaiseDivideError().
 *
 * This is for things that will _always_ decode to an \#DE, taking the
 * recompiler into consideration and everything.
 *
 * @return  Strict VBox status code.
 */
#define IEMOP_RAISE_DIVIDE_ERROR_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseDivideError)

/**
 * Macro for calling iemCImplRaiseInvalidLockPrefix().
 *
 * This is for things that will _always_ decode to an \#UD, taking the
 * recompiler into consideration and everything.
 *
 * @return  Strict VBox status code.
 */
#define IEMOP_RAISE_INVALID_LOCK_PREFIX_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidLockPrefix)

/**
 * Macro for calling iemCImplRaiseInvalidOpcode() for decode/static \#UDs.
 *
 * This is for things that will _always_ decode to an \#UD, taking the
 * recompiler into consideration and everything.
 *
 * @return  Strict VBox status code.
 */
#define IEMOP_RAISE_INVALID_OPCODE_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidOpcode)

/**
 * Macro for calling iemCImplRaiseInvalidOpcode() for runtime-style \#UDs.
 *
 * Using this macro means you've got _buggy_ _code_ and are doing things that
 * belong exclusively in IEMAllCImpl.cpp during decoding.
 *
 * @return  Strict VBox status code.
 * @see     IEMOP_RAISE_INVALID_OPCODE_RET
 */
#define IEMOP_RAISE_INVALID_OPCODE_RUNTIME_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidOpcode)

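/* A sketch of the decoder-side shape in which these macros are used.  The
   FNIEMOP_DEF wrapper and the LOCK-prefix check are simplified from the
   patterns in the IEMAllInstr*.cpp files, and the opcode name is entirely
   hypothetical.  Not part of the build. */
#if 0
FNIEMOP_DEF(iemOp_SomeHypotheticalOpcode)
{
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)   /* LOCK not allowed here */
        IEMOP_RAISE_INVALID_LOCK_PREFIX_RET();      /* defers to the CIMPL worker */
    IEMOP_RAISE_INVALID_OPCODE_RET();               /* stand-in for the real decode */
}
#endif
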
/** @} */

/** @name Register Access.
 * @{ */
VBOXSTRICTRC iemRegRipRelativeJumpS8AndFinishClearingRF(PVMCPUCC pVCpu, uint8_t cbInstr, int8_t offNextInstr,
                                                        IEMMODE enmEffOpSize) RT_NOEXCEPT;
VBOXSTRICTRC iemRegRipRelativeJumpS16AndFinishClearingRF(PVMCPUCC pVCpu, uint8_t cbInstr, int16_t offNextInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemRegRipRelativeJumpS32AndFinishClearingRF(PVMCPUCC pVCpu, uint8_t cbInstr, int32_t offNextInstr,
                                                         IEMMODE enmEffOpSize) RT_NOEXCEPT;
/** @} */

/** @name FPU access and helpers.
 * @{ */
void iemFpuPushResult(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuPushResultWithMemOp(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuPushResultTwo(PVMCPUCC pVCpu, PIEMFPURESULTTWO pResult, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStoreResult(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStoreResultThenPop(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStoreResultWithMemOp(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg,
                                uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStoreResultWithMemOpThenPop(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg,
                                       uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuUpdateOpcodeAndIp(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuUpdateFSW(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuUpdateFSWThenPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuUpdateFSWWithMemOp(PVMCPUCC pVCpu, uint16_t u16FSW, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuUpdateFSWThenPopPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuUpdateFSWWithMemOpThenPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStackUnderflow(PVMCPUCC pVCpu, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStackUnderflowWithMemOp(PVMCPUCC pVCpu, uint8_t iStReg, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStackUnderflowThenPop(PVMCPUCC pVCpu, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStackUnderflowWithMemOpThenPop(PVMCPUCC pVCpu, uint8_t iStReg, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStackUnderflowThenPopPop(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStackPushUnderflow(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStackPushUnderflowTwo(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStackPushOverflow(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
void iemFpuStackPushOverflowWithMemOp(PVMCPUCC pVCpu, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
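
/* A sketch of how a two-operand FPU instruction typically feeds these
   helpers: an assembly-level worker fills an IEMFPURESULT (value plus FSW),
   which is then committed to the register stack.  The worker name and the
   guest-context paths follow patterns used elsewhere in IEM but should be
   read as assumptions here.  Not part of the build. */
#if 0
    IEMFPURESULT FpuRes;
    iemAImpl_fadd_r80_by_r80(&pVCpu->cpum.GstCtx.XState.x87, &FpuRes, pr80Value1, pr80Value2);
    /* Store into ST(iStReg), update FOP/FIP from uFpuOpcode, then pop ST0: */
    iemFpuStoreResultThenPop(pVCpu, &FpuRes, iStReg, uFpuOpcode);
#endif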
/** @} */

/** @name SSE+AVX SIMD access and helpers.
 * @{ */
void iemSseUpdateMxcsr(PVMCPUCC pVCpu, uint32_t fMxcsr) RT_NOEXCEPT;
/** @} */

/** @name Memory access.
 * @{ */

/** Report a \#GP instead of \#AC and do not restrict to ring-3 */
#define IEM_MEMMAP_F_ALIGN_GP       RT_BIT_32(16)
/** SSE access that should report a \#GP instead of \#AC, unless MXCSR.MM=1
 *  when it works like normal \#AC. Always used with IEM_MEMMAP_F_ALIGN_GP. */
#define IEM_MEMMAP_F_ALIGN_SSE      RT_BIT_32(17)
/** If \#AC is applicable, raise it. Always used with IEM_MEMMAP_F_ALIGN_GP.
 *  Users include FXSAVE & FXRSTOR. */
#define IEM_MEMMAP_F_ALIGN_GP_OR_AC RT_BIT_32(18)

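/* A sketch of how the flags above combine with iemMemMap(), prototyped just
   below: the uAlignCtl argument carries the alignment mask (size - 1) ORed
   with the IEM_MEMMAP_F_ALIGN_* flags.  Shown here for a 16-byte SSE read;
   the exact flag combination per instruction varies, so treat this as an
   illustration.  Not part of the build. */
#if 0
    void    *pvMem;
    uint8_t  bUnmapInfo;
    VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMem, &bUnmapInfo, 16, iSegReg, GCPtrMem,
                                      IEM_ACCESS_DATA_R,
                                      (16 - 1) | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_SSE);
    if (rcStrict == VINF_SUCCESS)
        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo);
#endif
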
VBOXSTRICTRC iemMemMap(PVMCPUCC pVCpu, void **ppvMem, uint8_t *pbUnmapInfo, size_t cbMem, uint8_t iSegReg, RTGCPTR GCPtrMem,
                       uint32_t fAccess, uint32_t uAlignCtl) RT_NOEXCEPT;
VBOXSTRICTRC iemMemCommitAndUnmap(PVMCPUCC pVCpu, uint8_t bUnmapInfo) RT_NOEXCEPT;
#ifndef IN_RING3
VBOXSTRICTRC iemMemCommitAndUnmapPostponeTroubleToR3(PVMCPUCC pVCpu, uint8_t bUnmapInfo) RT_NOEXCEPT;
#endif
void iemMemRollbackAndUnmap(PVMCPUCC pVCpu, uint8_t bUnmapInfo) RT_NOEXCEPT;
void iemMemRollback(PVMCPUCC pVCpu) RT_NOEXCEPT;
VBOXSTRICTRC iemMemApplySegment(PVMCPUCC pVCpu, uint32_t fAccess, uint8_t iSegReg, size_t cbMem, PRTGCPTR pGCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemMarkSelDescAccessed(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
VBOXSTRICTRC iemMemPageTranslateAndCheckAccess(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t cbAccess, uint32_t fAccess, PRTGCPHYS pGCPhysMem) RT_NOEXCEPT;

void iemOpcodeFlushLight(PVMCPUCC pVCpu, uint8_t cbInstr);
void iemOpcodeFlushHeavy(PVMCPUCC pVCpu, uint8_t cbInstr);
#ifdef IEM_WITH_CODE_TLB
void iemOpcodeFetchBytesJmp(PVMCPUCC pVCpu, size_t cbDst, void *pvDst) IEM_NOEXCEPT_MAY_LONGJMP;
#else
VBOXSTRICTRC iemOpcodeFetchMoreBytes(PVMCPUCC pVCpu, size_t cbMin) RT_NOEXCEPT;
#endif
#ifdef IEM_WITH_SETJMP
uint8_t iemOpcodeGetNextU8SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
uint16_t iemOpcodeGetNextU16SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t iemOpcodeGetNextU32SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t iemOpcodeGetNextU64SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
#else
VBOXSTRICTRC iemOpcodeGetNextU8Slow(PVMCPUCC pVCpu, uint8_t *pb) RT_NOEXCEPT;
VBOXSTRICTRC iemOpcodeGetNextS8SxU16Slow(PVMCPUCC pVCpu, uint16_t *pu16) RT_NOEXCEPT;
VBOXSTRICTRC iemOpcodeGetNextS8SxU32Slow(PVMCPUCC pVCpu, uint32_t *pu32) RT_NOEXCEPT;
VBOXSTRICTRC iemOpcodeGetNextS8SxU64Slow(PVMCPUCC pVCpu, uint64_t *pu64) RT_NOEXCEPT;
VBOXSTRICTRC iemOpcodeGetNextU16Slow(PVMCPUCC pVCpu, uint16_t *pu16) RT_NOEXCEPT;
VBOXSTRICTRC iemOpcodeGetNextU16ZxU32Slow(PVMCPUCC pVCpu, uint32_t *pu32) RT_NOEXCEPT;
VBOXSTRICTRC iemOpcodeGetNextU16ZxU64Slow(PVMCPUCC pVCpu, uint64_t *pu64) RT_NOEXCEPT;
VBOXSTRICTRC iemOpcodeGetNextU32Slow(PVMCPUCC pVCpu, uint32_t *pu32) RT_NOEXCEPT;
VBOXSTRICTRC iemOpcodeGetNextU32ZxU64Slow(PVMCPUCC pVCpu, uint64_t *pu64) RT_NOEXCEPT;
VBOXSTRICTRC iemOpcodeGetNextS32SxU64Slow(PVMCPUCC pVCpu, uint64_t *pu64) RT_NOEXCEPT;
VBOXSTRICTRC iemOpcodeGetNextU64Slow(PVMCPUCC pVCpu, uint64_t *pu64) RT_NOEXCEPT;
#endif

VBOXSTRICTRC iemMemFetchDataU8(PVMCPUCC pVCpu, uint8_t *pu8Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU16(PVMCPUCC pVCpu, uint16_t *pu16Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU32(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU32_ZX_U64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU64AlignedU128(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataR80(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataD80(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU128(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU128NoAc(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU128AlignedSse(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU256(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU256NoAc(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataU256AlignedAvx(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchDataXdtr(PVMCPUCC pVCpu, uint16_t *pcbLimit, PRTGCPTR pGCPtrBase, uint8_t iSegReg,
                                 RTGCPTR GCPtrMem, IEMMODE enmOpSize) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
uint8_t iemMemFetchDataU8SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint16_t iemMemFetchDataU16SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t iemMemFetchDataU32SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t iemMemFetchDataU32NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t iemMemFlatFetchDataU32SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t iemMemFetchDataU64SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t iemMemFetchDataU64NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t iemMemFetchDataU64AlignedU128SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataR80SafeJmp(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataD80SafeJmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU128SafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU128NoAcSafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU128AlignedSseSafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU256SafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU256NoAcSafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU256AlignedAvxSafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
# if 0 /* these are inlined now */
uint8_t iemMemFetchDataU8Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint16_t iemMemFetchDataU16Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t iemMemFetchDataU32Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t iemMemFlatFetchDataU32Jmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t iemMemFetchDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t iemMemFetchDataU64AlignedU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataR80Jmp(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataD80Jmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU128Jmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU128NoAcJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU128AlignedSseJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU256NoAcJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFetchDataU256AlignedAvxJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
# endif
void iemMemFetchDataU256Jmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
#endif

VBOXSTRICTRC iemMemFetchSysU8(PVMCPUCC pVCpu, uint8_t *pu8Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchSysU16(PVMCPUCC pVCpu, uint16_t *pu16Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchSysU32(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchSysU64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
VBOXSTRICTRC iemMemFetchSelDesc(PVMCPUCC pVCpu, PIEMSELDESC pDesc, uint16_t uSel, uint8_t uXcpt) RT_NOEXCEPT;

VBOXSTRICTRC iemMemStoreDataU8(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStoreDataU16(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint16_t u16Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStoreDataU32(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t u32Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStoreDataU64(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint64_t u64Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStoreDataU128(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStoreDataU128NoAc(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStoreDataU128AlignedSse(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStoreDataU256(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStoreDataU256NoAc(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStoreDataU256AlignedAvx(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStoreDataXdtr(PVMCPUCC pVCpu, uint16_t cbLimit, RTGCPTR GCPtrBase, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
#ifdef IEM_WITH_SETJMP
void iemMemStoreDataU8SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU16SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint16_t u16Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU32SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t u32Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU64SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint64_t u64Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU128SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU128NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT128U pu128Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU128AlignedSseSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT128U pu128Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU256SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU256NoAcSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU256AlignedAvxSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataR80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTFLOAT80U pr80Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataD80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTPBCD80U pd80Value) IEM_NOEXCEPT_MAY_LONGJMP;
#if 0
void iemMemStoreDataU8Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU16Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint16_t u16Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU32Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t u32Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint64_t u64Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataNoAcU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU256NoAcJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU256AlignedAvxJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
#endif
void iemMemStoreDataU128AlignedSseJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreDataU256Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
#endif

#ifdef IEM_WITH_SETJMP
uint8_t *iemMemMapDataU8RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint8_t *iemMemMapDataU8AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint8_t *iemMemMapDataU8WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint8_t const *iemMemMapDataU8RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint16_t *iemMemMapDataU16RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint16_t *iemMemMapDataU16AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint16_t *iemMemMapDataU16WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint16_t const *iemMemMapDataU16RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t *iemMemMapDataU32RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t *iemMemMapDataU32AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t *iemMemMapDataU32WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t const *iemMemMapDataU32RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t *iemMemMapDataU64RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t *iemMemMapDataU64AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t *iemMemMapDataU64WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t const *iemMemMapDataU64RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
PRTFLOAT80U iemMemMapDataR80RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
PRTFLOAT80U iemMemMapDataR80WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
PCRTFLOAT80U iemMemMapDataR80RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
PRTPBCD80U iemMemMapDataD80RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
PRTPBCD80U iemMemMapDataD80WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
PCRTPBCD80U iemMemMapDataD80RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
PRTUINT128U iemMemMapDataU128RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
PRTUINT128U iemMemMapDataU128AtSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
PRTUINT128U iemMemMapDataU128WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
PCRTUINT128U iemMemMapDataU128RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;

void iemMemCommitAndUnmapJmp(PVMCPUCC pVCpu, uint8_t bUnmapInfo) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemCommitAndUnmapRwSafeJmp(PVMCPUCC pVCpu, uint8_t bUnmapInfo) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemCommitAndUnmapAtSafeJmp(PVMCPUCC pVCpu, uint8_t bUnmapInfo) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemCommitAndUnmapWoSafeJmp(PVMCPUCC pVCpu, uint8_t bUnmapInfo) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemCommitAndUnmapRoSafeJmp(PVMCPUCC pVCpu, uint8_t bUnmapInfo) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemRollbackAndUnmapWoSafe(PVMCPUCC pVCpu, uint8_t bUnmapInfo) RT_NOEXCEPT;
#endif

VBOXSTRICTRC iemMemStackPushBeginSpecial(PVMCPUCC pVCpu, size_t cbMem, uint32_t cbAlign,
                                         void **ppvMem, uint8_t *pbUnmapInfo, uint64_t *puNewRsp) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPushCommitSpecial(PVMCPUCC pVCpu, uint8_t bUnmapInfo, uint64_t uNewRsp) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPushU16(PVMCPUCC pVCpu, uint16_t u16Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPushU32(PVMCPUCC pVCpu, uint32_t u32Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPushU64(PVMCPUCC pVCpu, uint64_t u64Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPushU16Ex(PVMCPUCC pVCpu, uint16_t u16Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPushU32Ex(PVMCPUCC pVCpu, uint32_t u32Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPushU64Ex(PVMCPUCC pVCpu, uint64_t u64Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPushU32SReg(PVMCPUCC pVCpu, uint32_t u32Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPopBeginSpecial(PVMCPUCC pVCpu, size_t cbMem, uint32_t cbAlign,
                                        void const **ppvMem, uint8_t *pbUnmapInfo, uint64_t *puNewRsp) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPopContinueSpecial(PVMCPUCC pVCpu, size_t off, size_t cbMem,
                                           void const **ppvMem, uint8_t *pbUnmapInfo, uint64_t uCurNewRsp) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPopDoneSpecial(PVMCPUCC pVCpu, uint8_t bUnmapInfo) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPopU16(PVMCPUCC pVCpu, uint16_t *pu16Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPopU32(PVMCPUCC pVCpu, uint32_t *pu32Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPopU64(PVMCPUCC pVCpu, uint64_t *pu64Value) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPopU16Ex(PVMCPUCC pVCpu, uint16_t *pu16Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPopU32Ex(PVMCPUCC pVCpu, uint32_t *pu32Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;
VBOXSTRICTRC iemMemStackPopU64Ex(PVMCPUCC pVCpu, uint64_t *pu64Value, PRTUINT64U pTmpRsp) RT_NOEXCEPT;

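/* A sketch of the status-code stack helpers in use.  The push/pop pairing is
   deliberately contrived (a real caller pushes during CALL-style emulation
   and pops during RET-style emulation); it only illustrates the check-and-
   propagate pattern.  Not part of the build. */
#if 0
    VBOXSTRICTRC rcStrict = iemMemStackPushU16(pVCpu, pVCpu->cpum.GstCtx.cs.Sel);
    if (rcStrict == VINF_SUCCESS)
    {
        uint16_t u16Ret;
        rcStrict = iemMemStackPopU16(pVCpu, &u16Ret);
    }
    return rcStrict;
#endif
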
#ifdef IEM_WITH_SETJMP
void iemMemStackPushU16SafeJmp(PVMCPUCC pVCpu, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStackPushU32SafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStackPushU32SRegSafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStackPushU64SafeJmp(PVMCPUCC pVCpu, uint64_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStackPopGRegU16SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStackPopGRegU32SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStackPopGRegU64SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;

void iemMemFlat32StackPushU16SafeJmp(PVMCPUCC pVCpu, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFlat32StackPushU32SafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFlat32StackPushU32SRegSafeJmp(PVMCPUCC pVCpu, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFlat32StackPopGRegU16SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFlat32StackPopGRegU32SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;

void iemMemFlat64StackPushU16SafeJmp(PVMCPUCC pVCpu, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFlat64StackPushU64SafeJmp(PVMCPUCC pVCpu, uint64_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFlat64StackPopGRegU16SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemFlat64StackPopGRegU64SafeJmp(PVMCPUCC pVCpu, uint8_t iGReg) IEM_NOEXCEPT_MAY_LONGJMP;

void iemMemStoreStackU16SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint16_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreStackU32SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreStackU32SRegSafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;
void iemMemStoreStackU64SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint64_t uValue) IEM_NOEXCEPT_MAY_LONGJMP;

uint16_t iemMemFetchStackU16SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint32_t iemMemFetchStackU32SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
uint64_t iemMemFetchStackU64SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;

#endif

/** @} */

/** @name IEMAllCImpl.cpp
 * @note sed -e '/IEM_CIMPL_DEF_/!d' -e 's/IEM_CIMPL_DEF_/IEM_CIMPL_PROTO_/' -e 's/$/;/'
 * @{ */
IEM_CIMPL_PROTO_2(iemCImpl_pop_mem16, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_2(iemCImpl_pop_mem32, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_2(iemCImpl_pop_mem64, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_0(iemCImpl_popa_16);
IEM_CIMPL_PROTO_0(iemCImpl_popa_32);
IEM_CIMPL_PROTO_0(iemCImpl_pusha_16);
IEM_CIMPL_PROTO_0(iemCImpl_pusha_32);
IEM_CIMPL_PROTO_1(iemCImpl_pushf, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_1(iemCImpl_popf, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_3(iemCImpl_FarJmp, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_3(iemCImpl_callf, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
typedef FNIEMCIMPLFARBRANCH *PFNIEMCIMPLFARBRANCH;
IEM_CIMPL_PROTO_2(iemCImpl_retf, IEMMODE, enmEffOpSize, uint16_t, cbPop);
IEM_CIMPL_PROTO_3(iemCImpl_enter, IEMMODE, enmEffOpSize, uint16_t, cbFrame, uint8_t, cParameters);
IEM_CIMPL_PROTO_1(iemCImpl_leave, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_2(iemCImpl_int, uint8_t, u8Int, IEMINT, enmInt);
IEM_CIMPL_PROTO_1(iemCImpl_iret_real_v8086, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_4(iemCImpl_iret_prot_v8086, uint32_t, uNewEip, uint16_t, uNewCs, uint32_t, uNewFlags, uint64_t, uNewRsp);
IEM_CIMPL_PROTO_1(iemCImpl_iret_prot_NestedTask, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_1(iemCImpl_iret_prot, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_1(iemCImpl_iret_64bit, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_1(iemCImpl_iret, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_0(iemCImpl_loadall286);
IEM_CIMPL_PROTO_0(iemCImpl_syscall);
IEM_CIMPL_PROTO_1(iemCImpl_sysret, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_0(iemCImpl_sysenter);
IEM_CIMPL_PROTO_1(iemCImpl_sysexit, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_2(iemCImpl_LoadSReg, uint8_t, iSegReg, uint16_t, uSel);
IEM_CIMPL_PROTO_2(iemCImpl_load_SReg, uint8_t, iSegReg, uint16_t, uSel);
IEM_CIMPL_PROTO_2(iemCImpl_pop_Sreg, uint8_t, iSegReg, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_5(iemCImpl_load_SReg_Greg, uint16_t, uSel, uint64_t, offSeg, uint8_t, iSegReg, uint8_t, iGReg, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_2(iemCImpl_VerX, uint16_t, uSel, bool, fWrite);
IEM_CIMPL_PROTO_3(iemCImpl_LarLsl_u64, uint64_t *, pu64Dst, uint16_t, uSel, bool, fIsLar);
IEM_CIMPL_PROTO_3(iemCImpl_LarLsl_u16, uint16_t *, pu16Dst, uint16_t, uSel, bool, fIsLar);
IEM_CIMPL_PROTO_3(iemCImpl_lgdt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_2(iemCImpl_sgdt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_3(iemCImpl_lidt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_2(iemCImpl_sidt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_1(iemCImpl_lldt, uint16_t, uNewLdt);
IEM_CIMPL_PROTO_2(iemCImpl_sldt_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
IEM_CIMPL_PROTO_2(iemCImpl_sldt_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_1(iemCImpl_ltr, uint16_t, uNewTr);
IEM_CIMPL_PROTO_2(iemCImpl_str_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
IEM_CIMPL_PROTO_2(iemCImpl_str_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Cd, uint8_t, iGReg, uint8_t, iCrReg);
IEM_CIMPL_PROTO_2(iemCImpl_smsw_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
IEM_CIMPL_PROTO_2(iemCImpl_smsw_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_4(iemCImpl_load_CrX, uint8_t, iCrReg, uint64_t, uNewCrX, IEMACCESSCRX, enmAccessCrX, uint8_t, iGReg);
IEM_CIMPL_PROTO_2(iemCImpl_mov_Cd_Rd, uint8_t, iCrReg, uint8_t, iGReg);
IEM_CIMPL_PROTO_2(iemCImpl_lmsw, uint16_t, u16NewMsw, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_0(iemCImpl_clts);
IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Dd, uint8_t, iGReg, uint8_t, iDrReg);
IEM_CIMPL_PROTO_2(iemCImpl_mov_Dd_Rd, uint8_t, iDrReg, uint8_t, iGReg);
IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Td, uint8_t, iGReg, uint8_t, iTrReg);
IEM_CIMPL_PROTO_2(iemCImpl_mov_Td_Rd, uint8_t, iTrReg, uint8_t, iGReg);
IEM_CIMPL_PROTO_1(iemCImpl_invlpg, RTGCPTR, GCPtrPage);
IEM_CIMPL_PROTO_3(iemCImpl_invpcid, uint8_t, iEffSeg, RTGCPTR, GCPtrInvpcidDesc, uint64_t, uInvpcidType);
IEM_CIMPL_PROTO_0(iemCImpl_invd);
IEM_CIMPL_PROTO_0(iemCImpl_wbinvd);
IEM_CIMPL_PROTO_0(iemCImpl_rsm);
IEM_CIMPL_PROTO_0(iemCImpl_rdtsc);
IEM_CIMPL_PROTO_0(iemCImpl_rdtscp);
IEM_CIMPL_PROTO_0(iemCImpl_rdpmc);
IEM_CIMPL_PROTO_0(iemCImpl_rdmsr);
IEM_CIMPL_PROTO_0(iemCImpl_wrmsr);
IEM_CIMPL_PROTO_3(iemCImpl_in, uint16_t, u16Port, uint8_t, cbReg, uint8_t, bImmAndEffAddrMode);
IEM_CIMPL_PROTO_2(iemCImpl_in_eAX_DX, uint8_t, cbReg, IEMMODE, enmEffAddrMode);
IEM_CIMPL_PROTO_3(iemCImpl_out, uint16_t, u16Port, uint8_t, cbReg, uint8_t, bImmAndEffAddrMode);
IEM_CIMPL_PROTO_2(iemCImpl_out_DX_eAX, uint8_t, cbReg, IEMMODE, enmEffAddrMode);
IEM_CIMPL_PROTO_0(iemCImpl_cli);
IEM_CIMPL_PROTO_0(iemCImpl_sti);
IEM_CIMPL_PROTO_0(iemCImpl_hlt);
IEM_CIMPL_PROTO_1(iemCImpl_monitor, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_mwait);
IEM_CIMPL_PROTO_0(iemCImpl_swapgs);
IEM_CIMPL_PROTO_0(iemCImpl_cpuid);
IEM_CIMPL_PROTO_1(iemCImpl_aad, uint8_t, bImm);
IEM_CIMPL_PROTO_1(iemCImpl_aam, uint8_t, bImm);
IEM_CIMPL_PROTO_0(iemCImpl_daa);
IEM_CIMPL_PROTO_0(iemCImpl_das);
IEM_CIMPL_PROTO_0(iemCImpl_aaa);
IEM_CIMPL_PROTO_0(iemCImpl_aas);
IEM_CIMPL_PROTO_3(iemCImpl_bound_16, int16_t, idxArray, int16_t, idxLowerBound, int16_t, idxUpperBound);
IEM_CIMPL_PROTO_3(iemCImpl_bound_32, int32_t, idxArray, int32_t, idxLowerBound, int32_t, idxUpperBound);
IEM_CIMPL_PROTO_0(iemCImpl_xgetbv);
IEM_CIMPL_PROTO_0(iemCImpl_xsetbv);
IEM_CIMPL_PROTO_5(iemCImpl_cmpxchg16b_fallback_rendezvous, PRTUINT128U, pu128Dst, PRTUINT128U, pu128RaxRdx,
                  PRTUINT128U, pu128RbxRcx, uint32_t *, pEFlags, uint8_t, bUnmapInfo);
IEM_CIMPL_PROTO_2(iemCImpl_clflush_clflushopt, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
IEM_CIMPL_PROTO_1(iemCImpl_finit, bool, fCheckXcpts);
IEM_CIMPL_PROTO_3(iemCImpl_fxsave, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_3(iemCImpl_fxrstor, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_3(iemCImpl_xsave, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_3(iemCImpl_xrstor, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_2(iemCImpl_stmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
IEM_CIMPL_PROTO_2(iemCImpl_vstmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
IEM_CIMPL_PROTO_2(iemCImpl_ldmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
IEM_CIMPL_PROTO_2(iemCImpl_vldmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
IEM_CIMPL_PROTO_3(iemCImpl_fnstenv, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_3(iemCImpl_fnsave, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
IEM_CIMPL_PROTO_3(iemCImpl_fldenv, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_3(iemCImpl_frstor, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_1(iemCImpl_fldcw, uint16_t, u16Fcw);
IEM_CIMPL_PROTO_2(iemCImpl_fxch_underflow, uint8_t, iStReg, uint16_t, uFpuOpcode);
IEM_CIMPL_PROTO_3(iemCImpl_fcomi_fucomi, uint8_t, iStReg, bool, fUCmp, uint32_t, uPopAndFpuOpcode);
IEM_CIMPL_PROTO_2(iemCImpl_rdseed, uint8_t, iReg, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_2(iemCImpl_rdrand, uint8_t, iReg, IEMMODE, enmEffOpSize);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);

/** @} */

/** @name IEMAllCImplStrInstr.cpp.h
 * @note sed -e '/IEM_CIMPL_DEF_/!d' -e 's/IEM_CIMPL_DEF_/IEM_CIMPL_PROTO_/' -e 's/$/;/' -e 's/RT_CONCAT4(//' \
 *           -e 's/,ADDR_SIZE)/64/g' -e 's/,OP_SIZE,/64/g' -e 's/,OP_rAX,/rax/g' IEMAllCImplStrInstr.cpp.h
 * @{ */
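/* Naming scheme, by example: iemCImpl_repe_cmps_op16_addr32 is REPE CMPSW
   (16-bit operand size) with 32-bit addressing, i.e. the expansion of
   RT_CONCAT4(iemCImpl_repe_cmps_op,OP_SIZE,_addr,ADDR_SIZE) for OP_SIZE=16,
   ADDR_SIZE=32.  The scas/stos/lods variants encode the operand size in the
   register name (al/ax/eax/rax) and the address size in the m16/m32/m64
   suffix. */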
IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr16, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr16, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m16);
IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m16);
IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr16, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m16);
IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m16, int8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr16, bool, fIoChecked);
IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr16, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr16, uint8_t, iEffSeg, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr16, uint8_t, iEffSeg, bool, fIoChecked);

IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr16, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr16, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m16);
IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m16);
IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr16, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m16);
IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m16, int8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr16, bool, fIoChecked);
IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr16, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr16, uint8_t, iEffSeg, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr16, uint8_t, iEffSeg, bool, fIoChecked);

IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr16, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr16, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m16);
IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m16);
IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr16, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m16);
IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m16, int8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr16, bool, fIoChecked);
IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr16, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr16, uint8_t, iEffSeg, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr16, uint8_t, iEffSeg, bool, fIoChecked);


IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m32);
IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m32);
IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m32);
IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m32, int8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr32, bool, fIoChecked);
IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr32, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr32, uint8_t, iEffSeg, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr32, uint8_t, iEffSeg, bool, fIoChecked);

IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m32);
IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m32);
IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m32);
IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m32, int8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr32, bool, fIoChecked);
IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr32, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr32, uint8_t, iEffSeg, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr32, uint8_t, iEffSeg, bool, fIoChecked);

IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m32);
IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m32);
IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m32);
IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m32, int8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr32, bool, fIoChecked);
IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr32, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr32, uint8_t, iEffSeg, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr32, uint8_t, iEffSeg, bool, fIoChecked);

IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op64_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op64_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_rax_m32);
IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_rax_m32);
IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op64_addr32, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_stos_rax_m32);
IEM_CIMPL_PROTO_1(iemCImpl_lods_rax_m32, int8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_ins_op64_addr32, bool, fIoChecked);
IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op64_addr32, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_outs_op64_addr32, uint8_t, iEffSeg, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op64_addr32, uint8_t, iEffSeg, bool, fIoChecked);


IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr64, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr64, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m64);
IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m64);
IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr64, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m64);
IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m64, int8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr64, bool, fIoChecked);
IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr64, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr64, uint8_t, iEffSeg, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr64, uint8_t, iEffSeg, bool, fIoChecked);

IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr64, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr64, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m64);
IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m64);
IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr64, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m64);
IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m64, int8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr64, bool, fIoChecked);
IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr64, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr64, uint8_t, iEffSeg, bool, fIoChecked);
IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr64, uint8_t, iEffSeg, bool, fIoChecked);

IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr64, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr64, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m64);
IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m64);
IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr64, uint8_t, iEffSeg);
IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m64);
IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m64, int8_t, iEffSeg);
IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr64, bool, fIoChecked);
IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr64, bool, fIoChecked);
6550IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr64, uint8_t, iEffSeg, bool, fIoChecked);
6551IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr64, uint8_t, iEffSeg, bool, fIoChecked);
6552
6553IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op64_addr64, uint8_t, iEffSeg);
6554IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op64_addr64, uint8_t, iEffSeg);
6555IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_rax_m64);
6556IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_rax_m64);
6557IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op64_addr64, uint8_t, iEffSeg);
6558IEM_CIMPL_PROTO_0(iemCImpl_stos_rax_m64);
6559IEM_CIMPL_PROTO_1(iemCImpl_lods_rax_m64, int8_t, iEffSeg);
6560IEM_CIMPL_PROTO_1(iemCImpl_ins_op64_addr64, bool, fIoChecked);
6561IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op64_addr64, bool, fIoChecked);
6562IEM_CIMPL_PROTO_2(iemCImpl_outs_op64_addr64, uint8_t, iEffSeg, bool, fIoChecked);
6563IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op64_addr64, uint8_t, iEffSeg, bool, fIoChecked);
6564/** @} */
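
/* Orientation note for the string-instruction prototypes above: the names form
 * a grid of operand size (op8/op16/op32/op64, or al/ax/eax/rax for the
 * accumulator forms) crossed with effective address size (addr16/m16,
 * addr32/m32, addr64/m64).  As a rough sketch of what the variadic macros
 * expand to (illustrative only -- the authoritative IEM_CIMPL_PROTO_1
 * definition lives earlier in this header):
 *
 *     IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr16, uint8_t, iEffSeg);
 *
 * declares, give or take calling-convention decoration, a worker shaped like:
 *
 *     VBOXSTRICTRC iemCImpl_rep_movs_op8_addr16(PVMCPUCC pVCpu,
 *                                               uint8_t cbInstr,
 *                                               uint8_t iEffSeg);
 */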

#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
VBOXSTRICTRC iemVmxVmexit(PVMCPUCC pVCpu, uint32_t uExitReason, uint64_t u64ExitQual) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstr(PVMCPUCC pVCpu, uint32_t uExitReason, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrNeedsInfo(PVMCPUCC pVCpu, uint32_t uExitReason, VMXINSTRID uInstrId, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitTaskSwitch(PVMCPUCC pVCpu, IEMTASKSWITCH enmTaskSwitch, RTSEL SelNewTss, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitEvent(PVMCPUCC pVCpu, uint8_t uVector, uint32_t fFlags, uint32_t uErrCode, uint64_t uCr2, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitEventDoubleFault(PVMCPUCC pVCpu) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitEpt(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint32_t fAccess, uint32_t fSlatFail, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitPreemptTimer(PVMCPUCC pVCpu) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrMwait(PVMCPUCC pVCpu, bool fMonitorHwArmed, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrIo(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint16_t u16Port,
                                 bool fImm, uint8_t cbAccess, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrStrIo(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint16_t u16Port, uint8_t cbAccess,
                                    bool fRep, VMXEXITINSTRINFO ExitInstrInfo, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrMovDrX(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint8_t iDrReg, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrMovToCr8(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrMovFromCr8(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrMovToCr3(PVMCPUCC pVCpu, uint64_t uNewCr3, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrMovFromCr3(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrMovToCr0Cr4(PVMCPUCC pVCpu, uint8_t iCrReg, uint64_t *puNewCrX, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrClts(PVMCPUCC pVCpu, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrLmsw(PVMCPUCC pVCpu, uint32_t uGuestCr0, uint16_t *pu16NewMsw,
                                   RTGCPTR GCPtrEffDst, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVmexitInstrInvlpg(PVMCPUCC pVCpu, RTGCPTR GCPtrPage, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxApicWriteEmulation(PVMCPUCC pVCpu) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxVirtApicAccessUnused(PVMCPUCC pVCpu, PRTGCPHYS pGCPhysAccess, size_t cbAccess, uint32_t fAccess) RT_NOEXCEPT;
uint32_t iemVmxVirtApicReadRaw32(PVMCPUCC pVCpu, uint16_t offReg) RT_NOEXCEPT;
void iemVmxVirtApicWriteRaw32(PVMCPUCC pVCpu, uint16_t offReg, uint32_t uReg) RT_NOEXCEPT;
VBOXSTRICTRC iemVmxInvvpid(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPTR GCPtrInvvpidDesc,
                           uint64_t u64InvvpidType, PCVMXVEXITINFO pExitInfo) RT_NOEXCEPT;
bool iemVmxIsRdmsrWrmsrInterceptSet(PCVMCPU pVCpu, uint32_t uExitReason, uint32_t idMsr) RT_NOEXCEPT;
IEM_CIMPL_PROTO_0(iemCImpl_vmxoff);
IEM_CIMPL_PROTO_2(iemCImpl_vmxon, uint8_t, iEffSeg, RTGCPTR, GCPtrVmxon);
IEM_CIMPL_PROTO_0(iemCImpl_vmlaunch);
IEM_CIMPL_PROTO_0(iemCImpl_vmresume);
IEM_CIMPL_PROTO_2(iemCImpl_vmptrld, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
IEM_CIMPL_PROTO_2(iemCImpl_vmptrst, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
IEM_CIMPL_PROTO_2(iemCImpl_vmclear, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
IEM_CIMPL_PROTO_2(iemCImpl_vmwrite_reg, uint64_t, u64Val, uint64_t, u64VmcsField);
IEM_CIMPL_PROTO_3(iemCImpl_vmwrite_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrVal, uint32_t, u64VmcsField);
IEM_CIMPL_PROTO_2(iemCImpl_vmread_reg64, uint64_t *, pu64Dst, uint64_t, u64VmcsField);
IEM_CIMPL_PROTO_2(iemCImpl_vmread_reg32, uint64_t *, pu32Dst, uint32_t, u32VmcsField);
IEM_CIMPL_PROTO_3(iemCImpl_vmread_mem_reg64, uint8_t, iEffSeg, RTGCPTR, GCPtrDst, uint32_t, u64VmcsField);
IEM_CIMPL_PROTO_3(iemCImpl_vmread_mem_reg32, uint8_t, iEffSeg, RTGCPTR, GCPtrDst, uint32_t, u32VmcsField);
IEM_CIMPL_PROTO_3(iemCImpl_invvpid, uint8_t, iEffSeg, RTGCPTR, GCPtrInvvpidDesc, uint64_t, uInvvpidType);
IEM_CIMPL_PROTO_3(iemCImpl_invept, uint8_t, iEffSeg, RTGCPTR, GCPtrInveptDesc, uint64_t, uInveptType);
IEM_CIMPL_PROTO_0(iemCImpl_vmx_pause);
#endif
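
/* Illustrative only (not an actual call site): an instruction intercepted in
 * VMX non-root mode typically funnels into one of the VM-exit workers above.
 * A CPUID intercept would look roughly like this, where VMX_EXIT_CPUID comes
 * from VBox/vmm/hm_vmx.h and the guard macro is a sketch, not lifted from
 * the decoder:
 *
 *     if (IEM_VMX_IS_NON_ROOT_MODE(pVCpu))
 *         return iemVmxVmexitInstr(pVCpu, VMX_EXIT_CPUID, cbInstr);
 */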

#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
VBOXSTRICTRC iemSvmVmexit(PVMCPUCC pVCpu, uint64_t uExitCode, uint64_t uExitInfo1, uint64_t uExitInfo2) RT_NOEXCEPT;
VBOXSTRICTRC iemHandleSvmEventIntercept(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector, uint32_t fFlags, uint32_t uErr, uint64_t uCr2) RT_NOEXCEPT;
VBOXSTRICTRC iemSvmHandleIOIntercept(PVMCPUCC pVCpu, uint16_t u16Port, SVMIOIOTYPE enmIoType, uint8_t cbReg,
                                     uint8_t cAddrSizeBits, uint8_t iEffSeg, bool fRep, bool fStrIo, uint8_t cbInstr) RT_NOEXCEPT;
VBOXSTRICTRC iemSvmHandleMsrIntercept(PVMCPUCC pVCpu, uint32_t idMsr, bool fWrite, uint8_t cbInstr) RT_NOEXCEPT;
IEM_CIMPL_PROTO_0(iemCImpl_vmrun);
IEM_CIMPL_PROTO_0(iemCImpl_vmload);
IEM_CIMPL_PROTO_0(iemCImpl_vmsave);
IEM_CIMPL_PROTO_0(iemCImpl_clgi);
IEM_CIMPL_PROTO_0(iemCImpl_stgi);
IEM_CIMPL_PROTO_0(iemCImpl_invlpga);
IEM_CIMPL_PROTO_0(iemCImpl_skinit);
IEM_CIMPL_PROTO_0(iemCImpl_svm_pause);
#endif
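
/* Likewise for SVM (sketch, not an actual call site): intercepts end up in
 * iemSvmVmexit() with an exit code and the two exit-info fields, e.g.:
 *
 *     return iemSvmVmexit(pVCpu, SVM_EXIT_CPUID, 0, 0);
 *
 * SVM_EXIT_CPUID is from VBox/vmm/hm_svm.h; the zero uExitInfo1/uExitInfo2
 * values are placeholders.
 */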

IEM_CIMPL_PROTO_0(iemCImpl_vmcall); /* vmx */
IEM_CIMPL_PROTO_0(iemCImpl_vmmcall); /* svm */
IEM_CIMPL_PROTO_1(iemCImpl_Hypercall, uint16_t, uDisOpcode); /* both */

extern const PFNIEMOP g_apfnIemInterpretOnlyOneByteMap[256];
extern const PFNIEMOP g_apfnIemInterpretOnlyTwoByteMap[1024];
extern const PFNIEMOP g_apfnIemInterpretOnlyThreeByte0f3a[1024];
extern const PFNIEMOP g_apfnIemInterpretOnlyThreeByte0f38[1024];
extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap1[1024];
extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap2[1024];
extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap3[1024];
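
/* Dispatch sketch for the maps above (assumptions: FNIEMOP takes just the
 * pVCpu pointer, and the 1024-entry maps presumably fold a prefix/variant
 * index into the opcode -- 256 opcodes x 4 slots; neither detail is stated
 * by the declarations themselves):
 *
 *     PFNIEMOP const pfnOp    = g_apfnIemInterpretOnlyOneByteMap[bOpcode];
 *     VBOXSTRICTRC   rcStrict = pfnOp(pVCpu);
 */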

/*
 * Recompiler related stuff.
 */
extern const PFNIEMOP g_apfnIemThreadedRecompilerOneByteMap[256];
extern const PFNIEMOP g_apfnIemThreadedRecompilerTwoByteMap[1024];
extern const PFNIEMOP g_apfnIemThreadedRecompilerThreeByte0f3a[1024];
extern const PFNIEMOP g_apfnIemThreadedRecompilerThreeByte0f38[1024];
extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap1[1024];
extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap2[1024];
extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap3[1024];

DECLCALLBACK(int) iemTbInit(PVMCC pVM, uint32_t cInitialTbs, uint32_t cMaxTbs,
                            uint64_t cbInitialExec, uint64_t cbMaxExec, uint32_t cbChunkExec);
void iemThreadedTbObsolete(PVMCPUCC pVCpu, PIEMTB pTb, bool fSafeToFree);
DECLHIDDEN(void) iemTbAllocatorFree(PVMCPUCC pVCpu, PIEMTB pTb);
void iemTbAllocatorProcessDelayedFrees(PVMCPUCC pVCpu, PIEMTBALLOCATOR pTbAllocator);
void iemTbAllocatorFreeupNativeSpace(PVMCPUCC pVCpu, uint32_t cNeededInstrs);
DECLHIDDEN(const char *) iemTbFlagsToString(uint32_t fFlags, char *pszBuf, size_t cbBuf) RT_NOEXCEPT;
DECLHIDDEN(void) iemThreadedDisassembleTb(PCIEMTB pTb, PCDBGFINFOHLP pHlp) RT_NOEXCEPT;


/** @todo FNIEMTHREADEDFUNC and friends may need more work... */
#if defined(__GNUC__) && !defined(IEM_WITH_THROW_CATCH)
typedef VBOXSTRICTRC /*__attribute__((__nothrow__))*/ FNIEMTHREADEDFUNC(PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2);
typedef FNIEMTHREADEDFUNC *PFNIEMTHREADEDFUNC;
# define IEM_DECL_IEMTHREADEDFUNC_DEF(a_Name) \
    VBOXSTRICTRC __attribute__((__nothrow__)) a_Name(PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2)
# define IEM_DECL_IEMTHREADEDFUNC_PROTO(a_Name) \
    VBOXSTRICTRC __attribute__((__nothrow__)) a_Name(PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2)

#else
typedef VBOXSTRICTRC (FNIEMTHREADEDFUNC)(PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2);
typedef FNIEMTHREADEDFUNC *PFNIEMTHREADEDFUNC;
# define IEM_DECL_IEMTHREADEDFUNC_DEF(a_Name) \
    VBOXSTRICTRC a_Name(PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2) IEM_NOEXCEPT_MAY_LONGJMP
# define IEM_DECL_IEMTHREADEDFUNC_PROTO(a_Name) \
    VBOXSTRICTRC a_Name(PVMCPU pVCpu, uint64_t uParam0, uint64_t uParam1, uint64_t uParam2) IEM_NOEXCEPT_MAY_LONGJMP
#endif
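
/* Usage sketch for the macros above (iemThreadedFunc_Example is a made-up
 * name): a threaded function is prototyped and defined through the same
 * macro pair so the nothrow/longjmp decoration stays consistent between
 * declaration and definition:
 *
 *     IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_Example);
 *     IEM_DECL_IEMTHREADEDFUNC_DEF(iemThreadedFunc_Example)
 *     {
 *         RT_NOREF(uParam0, uParam1, uParam2);
 *         return VINF_SUCCESS;
 *     }
 */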


IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_Nop);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_LogCpuState);

IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_DeferToCImpl0);

IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckIrq);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckMode);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckHwInstrBps);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLim);

IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodes);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodes);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesConsiderCsLim);

/* Branching: */
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndPcAndOpcodes);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckPcAndOpcodes);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckPcAndOpcodesConsiderCsLim);

IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesLoadingTlb);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesLoadingTlb);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesLoadingTlbConsiderCsLim);

/* Natural page crossing: */
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesAcrossPageLoadingTlb);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesAcrossPageLoadingTlb);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesAcrossPageLoadingTlbConsiderCsLim);

IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesOnNextPageLoadingTlb);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNextPageLoadingTlb);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNextPageLoadingTlbConsiderCsLim);

IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodesOnNewPageLoadingTlb);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNewPageLoadingTlb);
IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNewPageLoadingTlbConsiderCsLim);
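
/* Naming scheme of the built-in checks above, roughly: a CsLim component
 * revalidates the CS segment limit, Pc revalidates the branch target,
 * Opcodes compares the opcode bytes against the TB, the LoadingTlb /
 * AcrossPage / OnNextPage / OnNewPage variants cover the TLB-load and
 * page-crossing situations, and the ...ConsiderCsLim forms decide at
 * runtime whether the CS limit check is needed. */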

bool iemThreadedCompileEmitIrqCheckBefore(PVMCPUCC pVCpu, PIEMTB pTb);
bool iemThreadedCompileBeginEmitCallsComplications(PVMCPUCC pVCpu, PIEMTB pTb);

/* Native recompiler public bits: */

DECLHIDDEN(PIEMTB) iemNativeRecompile(PVMCPUCC pVCpu, PIEMTB pTb) RT_NOEXCEPT;
DECLHIDDEN(void) iemNativeDisassembleTb(PVMCPU pVCpu, PCIEMTB pTb, PCDBGFINFOHLP pHlp) RT_NOEXCEPT;
int iemExecMemAllocatorInit(PVMCPU pVCpu, uint64_t cbMax, uint64_t cbInitial, uint32_t cbChunk) RT_NOEXCEPT;
DECLHIDDEN(PIEMNATIVEINSTR) iemExecMemAllocatorAlloc(PVMCPU pVCpu, uint32_t cbReq, PIEMTB pTb, PIEMNATIVEINSTR *ppaExec,
                                                     struct IEMNATIVEPERCHUNKCTX const **ppChunkCtx) RT_NOEXCEPT;
DECLHIDDEN(PIEMNATIVEINSTR) iemExecMemAllocatorAllocFromChunk(PVMCPU pVCpu, uint32_t idxChunk, uint32_t cbReq,
                                                              PIEMNATIVEINSTR *ppaExec);
DECLHIDDEN(void) iemExecMemAllocatorReadyForUse(PVMCPUCC pVCpu, void *pv, size_t cb) RT_NOEXCEPT;
void iemExecMemAllocatorFree(PVMCPU pVCpu, void *pv, size_t cb) RT_NOEXCEPT;
DECLASM(DECL_NO_RETURN(void)) iemNativeTbLongJmp(void *pvFramePointer, int rc) RT_NOEXCEPT;
DECLHIDDEN(struct IEMNATIVEPERCHUNKCTX const *) iemExecMemGetTbChunkCtx(PVMCPU pVCpu, PCIEMTB pTb);
DECLHIDDEN(struct IEMNATIVEPERCHUNKCTX const *) iemNativeRecompileAttachExecMemChunkCtx(PVMCPU pVCpu, uint32_t idxChunk);
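/* Rough lifetime sketch for the executable-memory allocator above (argument
 * values are made up; ReadyForUse presumably performs the needed i-cache
 * maintenance before the emitted code may be executed):
 *
 *     iemExecMemAllocatorInit(pVCpu, cbMax, cbInitial, cbChunk);
 *     paInstr = iemExecMemAllocatorAlloc(pVCpu, cbReq, pTb, &paExec, &pChunkCtx);
 *     ... emit native code into the buffer ...
 *     iemExecMemAllocatorReadyForUse(pVCpu, paExec, cbUsed);
 *     iemExecMemAllocatorFree(pVCpu, paExec, cbUsed);     <- when the TB dies
 */
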
#endif /* !RT_IN_ASSEMBLER - ASM-NOINC-END */


/** @} */

RT_C_DECLS_END

/* ASM-INC: %include "IEMInternalStruct.mac" */

#endif /* !VMM_INCLUDED_SRC_include_IEMInternal_h */
