Changeset 108791 in vbox for trunk/src


Timestamp: Mar 28, 2025 9:58:31 PM
Author: vboxsync
svn:sync-xref-src-repo-rev: 168226
Message:

VMM/IEM: More ARM target work. jiraref:VBP-1598

Location: trunk/src/VBox/VMM
Files: 1 deleted, 15 edited, 2 copied

  • trunk/src/VBox/VMM/Makefile.kmk

    r108729 → r108791
    @@ -570 +570 @@
             VMMAll/IEMAllMem.cpp \
             VMMAll/IEMAllTlb.cpp \
    -        VMMAll/target-armv8/IEMAll-armv8.cpp
    +        VMMAll/target-armv8/IEMAll-armv8.cpp \
    +        VMMAll/target-armv8/IEMAllMem-armv8.cpp
     else
      VBoxVMMArm_SOURCES += VMMAll/IEMAllStub.cpp
  • trunk/src/VBox/VMM/VMMAll/CPUMAllRegs-armv8.cpp

    r107650 → r108791
    @@ -318 +318 @@
     {
         CPUM_INT_ASSERT_NOT_EXTRN(pVCpu, CPUMCTX_EXTRN_PSTATE);
    +    Assert(!(pVCpu->cpum.s.Guest.fPState & ARMV8_SPSR_EL2_AARCH64_M4)); /* ASSUMES aarch64 mode */
         return ARMV8_SPSR_EL2_AARCH64_GET_EL(pVCpu->cpum.s.Guest.fPState);
     }
    @@ -330 +331 @@
     {
         CPUM_INT_ASSERT_NOT_EXTRN(pVCpu, CPUMCTX_EXTRN_PSTATE | CPUMCTX_EXTRN_SCTLR_TCR_TTBR);
    +    Assert(!(pVCpu->cpum.s.Guest.fPState & ARMV8_SPSR_EL2_AARCH64_M4)); /* ASSUMES aarch64 mode */
         uint8_t bEl = ARMV8_SPSR_EL2_AARCH64_GET_EL(pVCpu->cpum.s.Guest.fPState);
         if (bEl == ARMV8_AARCH64_EL_2)
    @@ -352 +354 @@
         CPUM_INT_ASSERT_NOT_EXTRN(pVCpu, CPUMCTX_EXTRN_PSTATE | CPUMCTX_EXTRN_SCTLR_TCR_TTBR);

    +    Assert(!(pVCpu->cpum.s.Guest.fPState & ARMV8_SPSR_EL2_AARCH64_M4)); /* ASSUMES aarch64 mode */
         uint8_t bEl = ARMV8_SPSR_EL2_AARCH64_GET_EL(pVCpu->cpum.s.Guest.fPState);
         if (bEl == ARMV8_AARCH64_EL_2)
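
Note: the three added assertions encode the same assumption: ARMV8_SPSR_EL2_AARCH64_GET_EL is only meaningful while PSTATE is in AArch64 state, where M[4] is clear and M[3:2] holds the exception level. A minimal stand-alone sketch of that layout (bit positions per the Arm ARM; the helper name is invented):

    #include <cstdint>
    #include <cassert>

    /* Sketch only: PSTATE/SPSR mode field in AArch64 state.
       M[4] (bit 4) = 0 selects AArch64; M[3:2] (bits 3:2) then hold EL0..EL3. */
    static uint8_t sketchGetAArch64El(uint64_t fPState)
    {
        assert(!(fPState & UINT64_C(0x10))); /* same check as the new Asserts */
        return (uint8_t)((fPState >> 2) & 0x3);
    }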
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp

    r108369 → r108791
    @@ -8822 +8822 @@
         /* Do the lookup manually. */
         RTGCPTR const      GCPtrFlat = (iSegReg == UINT8_MAX ? GCPtr : GCPtr + pVCpu->cpum.GstCtx.aSRegs[iSegReg].u64Base) + offDisp;
    -    uint64_t const     uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrFlat);
    +    uint64_t const     uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrFlat);
         PCIEMTLBENTRY      pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
         if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
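
Note: the change recurring throughout this revision is that IEMTLB_CALC_TAG_NO_REV now takes pVCpu. Unlike x86's fixed 4K pages, the ARM stage 1 granule (4K/16K/64K) is per-vCPU state, so the TLB tag shift can no longer be a compile-time constant. A hypothetical sketch of the shape such a macro might take (not the actual VBox definition):

    /* Hypothetical: the page shift comes from per-vCPU state (e.g. the
       IEM_F_ARM_S1_PAGE_* bits cached in fExec) instead of GUEST_PAGE_SHIFT. */
    #define SKETCH_TLB_CALC_TAG_NO_REV(a_pVCpu, a_GCPtr) \
        ((uint64_t)(a_GCPtr) >> SKETCH_GET_S1_PAGE_SHIFT(a_pVCpu)) /* 12, 14 or 16 */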
  • trunk/src/VBox/VMM/VMMAll/IEMAllTlb.cpp

    r108589 → r108791
    @@ -176 +176 @@
     #if defined(IEM_WITH_CODE_TLB) || defined(IEM_WITH_DATA_TLB)
         Log10(("IEMTlbInvalidatePage: GCPtr=%RGv\n", GCPtr));
    -    GCPtr = IEMTLB_CALC_TAG_NO_REV(GCPtr);
    +    GCPtr = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtr);
         Assert(!(GCPtr >> (48 - X86_PAGE_SHIFT)));
         uintptr_t const idxEven = IEMTLB_TAG_TO_EVEN_INDEX(GCPtr);
  • trunk/src/VBox/VMM/VMMAll/target-armv8/IEMAll-armv8.cpp

    r108710 → r108791
    @@ -60 +60 @@
     # define INVALID_TLB_ENTRY_FOR_BP(a_uValue) do { \
             RTGCPTR uTagNoRev = (a_uValue); \
    -        uTagNoRev = IEMTLB_CALC_TAG_NO_REV(uTagNoRev); \
    +        uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, uTagNoRev); \
             /** @todo do large page accounting */ \
             uintptr_t const idxEven = IEMTLB_TAG_TO_EVEN_INDEX(uTagNoRev); \
  • trunk/src/VBox/VMM/VMMAll/target-armv8/IEMAllMem-armv8.cpp

    r108707 → r108791
    @@ -1 +1 @@
     /* $Id$ */
     /** @file
    - * IEM - Interpreted Execution Manager - x86 target, memory.
    + * IEM - Interpreted Execution Manager - ARMV8 target, memory.
      */

    @@ -32 +32 @@
     #define LOG_GROUP LOG_GROUP_IEM_MEM
     #define VMCPU_INCL_CPUM_GST_CTX
    -#ifdef IN_RING0
    -# define VBOX_VMM_TARGET_X86
    -#endif
     #include <VBox/vmm/iem.h>
     #include <VBox/vmm/cpum.h>
    @@ -45 +42 @@
     #include <iprt/assert.h>
     #include <iprt/string.h>
    -#include <iprt/x86.h>
    +#include <iprt/armv8.h>

     #include "IEMInline.h"
    -#include "IEMInline-x86.h"
    -#include "IEMInlineMem-x86.h"
    -#include "IEMAllTlbInline-x86.h"
    +#include "IEMInline-armv8.h"
    +/// @todo #include "IEMInlineMem-armv8.h"
    +#include "IEMAllTlbInline-armv8.h"


     
    @@ -59 +56 @@

     /**
    - * Applies the segment limit, base and attributes.
    - *
    - * This may raise a \#GP or \#SS.
    - *
    - * @returns VBox strict status code.
    - *
    - * @param   pVCpu               The cross context virtual CPU structure of the calling thread.
    - * @param   fAccess             The kind of access which is being performed.
    - * @param   iSegReg             The index of the segment register to apply.
    - *                              This is UINT8_MAX if none (for IDT, GDT, LDT,
    - *                              TSS, ++).
    - * @param   cbMem               The access size.
    - * @param   pGCPtrMem           Pointer to the guest memory address to apply
    - *                              segmentation to.  Input and output parameter.
    - */
    -VBOXSTRICTRC iemMemApplySegment(PVMCPUCC pVCpu, uint32_t fAccess, uint8_t iSegReg, size_t cbMem, PRTGCPTR pGCPtrMem) RT_NOEXCEPT
    -{
    -    if (iSegReg == UINT8_MAX)
    -        return VINF_SUCCESS;
    -
    -    IEM_CTX_IMPORT_RET(pVCpu, CPUMCTX_EXTRN_SREG_FROM_IDX(iSegReg));
    -    PCPUMSELREGHID pSel = iemSRegGetHid(pVCpu, iSegReg);
    -    switch (IEM_GET_CPU_MODE(pVCpu))
    -    {
    -        case IEMMODE_16BIT:
    -        case IEMMODE_32BIT:
    -        {
    -            RTGCPTR32 GCPtrFirst32 = (RTGCPTR32)*pGCPtrMem;
    -            RTGCPTR32 GCPtrLast32  = GCPtrFirst32 + (uint32_t)cbMem - 1;
    -
    -            if (   pSel->Attr.n.u1Present
    -                && !pSel->Attr.n.u1Unusable)
    -            {
    -                Assert(pSel->Attr.n.u1DescType);
    -                if (!(pSel->Attr.n.u4Type & X86_SEL_TYPE_CODE))
    -                {
    -                    if (   (fAccess & IEM_ACCESS_TYPE_WRITE)
    -                        && !(pSel->Attr.n.u4Type & X86_SEL_TYPE_WRITE) )
    -                        return iemRaiseSelectorInvalidAccess(pVCpu, iSegReg, fAccess);
    -
    -                    if (!IEM_IS_REAL_OR_V86_MODE(pVCpu))
    -                    {
    -                        /** @todo CPL check. */
    -                    }
    -
    -                    /*
    -                     * There are two kinds of data selectors, normal and expand down.
    -                     */
    -                    if (!(pSel->Attr.n.u4Type & X86_SEL_TYPE_DOWN))
    -                    {
    -                        if (   GCPtrFirst32 > pSel->u32Limit
    -                            || GCPtrLast32  > pSel->u32Limit) /* yes, in real mode too (since 80286). */
    -                            return iemRaiseSelectorBounds(pVCpu, iSegReg, fAccess);
    -                    }
    -                    else
    -                    {
    -                       /*
    -                        * The upper boundary is defined by the B bit, not the G bit!
    -                        */
    -                       if (   GCPtrFirst32 < pSel->u32Limit + UINT32_C(1)
    -                           || GCPtrLast32  > (pSel->Attr.n.u1DefBig ? UINT32_MAX : UINT32_C(0xffff)))
    -                          return iemRaiseSelectorBounds(pVCpu, iSegReg, fAccess);
    -                    }
    -                    *pGCPtrMem = GCPtrFirst32 += (uint32_t)pSel->u64Base;
    -                }
    -                else
    -                {
    -                    /*
    -                     * Code selector and usually be used to read thru, writing is
    -                     * only permitted in real and V8086 mode.
    -                     */
    -                    if (   (   (fAccess & IEM_ACCESS_TYPE_WRITE)
    -                            || (   (fAccess & IEM_ACCESS_TYPE_READ)
    -                               && !(pSel->Attr.n.u4Type & X86_SEL_TYPE_READ)) )
    -                        && !IEM_IS_REAL_OR_V86_MODE(pVCpu) )
    -                        return iemRaiseSelectorInvalidAccess(pVCpu, iSegReg, fAccess);
    -
    -                    if (   GCPtrFirst32 > pSel->u32Limit
    -                        || GCPtrLast32  > pSel->u32Limit) /* yes, in real mode too (since 80286). */
    -                        return iemRaiseSelectorBounds(pVCpu, iSegReg, fAccess);
    -
    -                    if (!IEM_IS_REAL_OR_V86_MODE(pVCpu))
    -                    {
    -                        /** @todo CPL check. */
    -                    }
    -
    -                    *pGCPtrMem  = GCPtrFirst32 += (uint32_t)pSel->u64Base;
    -                }
    -            }
    -            else
    -                return iemRaiseGeneralProtectionFault0(pVCpu);
    -            return VINF_SUCCESS;
    -        }
    -
    -        case IEMMODE_64BIT:
    -        {
    -            RTGCPTR GCPtrMem = *pGCPtrMem;
    -            if (iSegReg == X86_SREG_GS || iSegReg == X86_SREG_FS)
    -                *pGCPtrMem = GCPtrMem + pSel->u64Base;
    -
    -            Assert(cbMem >= 1);
    -            if (RT_LIKELY(X86_IS_CANONICAL(GCPtrMem) && X86_IS_CANONICAL(GCPtrMem + cbMem - 1)))
    -                return VINF_SUCCESS;
    -            /** @todo We should probably raise \#SS(0) here if segment is SS; see AMD spec.
    -             *        4.12.2 "Data Limit Checks in 64-bit Mode". */
    -            return iemRaiseGeneralProtectionFault0(pVCpu);
    -        }
    -
    -        default:
    -            AssertFailedReturn(VERR_IEM_IPE_7);
    -    }
    -}
    + * Converts IEM_ACCESS_XXX + fExec to PGMQPAGE_F_XXX.
    + */
    +DECL_FORCE_INLINE(uint32_t) iemMemArmAccessToQPage(PVMCPUCC pVCpu, uint32_t fAccess)
    +{
    +    AssertCompile(IEM_ACCESS_TYPE_READ  == PGMQPAGE_F_READ);
    +    AssertCompile(IEM_ACCESS_TYPE_WRITE == PGMQPAGE_F_WRITE);
    +    AssertCompile(IEM_ACCESS_TYPE_EXEC  == PGMQPAGE_F_EXECUTE);
    +    /** @todo IEMTLBE_F_EFF_U_NO_GCS / IEMTLBE_F_EFF_P_NO_GCS,
    +     *  IEMTLBE_F_S1_NS/NSE, IEMTLBE_F_S2_NO_LIM_WRITE/TL0/TL1. */
    +    return (fAccess & (PGMQPAGE_F_READ | IEM_ACCESS_TYPE_WRITE | PGMQPAGE_F_EXECUTE))
    +         | (!(fAccess & IEM_ACCESS_WHAT_SYS) && IEM_F_MODE_ARM_GET_EL(pVCpu->iem.s.fExec) == 0 ? PGMQPAGE_F_USER_MODE : 0);
    +}
    +


     
    @@ -187 +85 @@
                                                    uint32_t fAccess, PRTGCPHYS pGCPhysMem) RT_NOEXCEPT
     {
    -    /** @todo Need a different PGM interface here.  We're currently using
    -     *        generic / REM interfaces. this won't cut it for R0. */
    -    /** @todo If/when PGM handles paged real-mode, we can remove the hack in
    -     *        iemSvmWorldSwitch/iemVmxWorldSwitch to work around raising a page-fault
    -     *        here. */
         Assert(!(fAccess & IEM_ACCESS_TYPE_EXEC));
         PGMPTWALKFAST WalkFast;
    -    AssertCompile(IEM_ACCESS_TYPE_READ  == PGMQPAGE_F_READ);
    -    AssertCompile(IEM_ACCESS_TYPE_WRITE == PGMQPAGE_F_WRITE);
    -    AssertCompile(IEM_ACCESS_TYPE_EXEC  == PGMQPAGE_F_EXECUTE);
    -    AssertCompile(X86_CR0_WP            == PGMQPAGE_F_CR0_WP0);
    -    uint32_t fQPage = (fAccess & (PGMQPAGE_F_READ | IEM_ACCESS_TYPE_WRITE | PGMQPAGE_F_EXECUTE))
    -                    | (((uint32_t)pVCpu->cpum.GstCtx.cr0 & X86_CR0_WP) ^ X86_CR0_WP);
    -    if (IEM_GET_CPL(pVCpu) == 3 && !(fAccess & IEM_ACCESS_WHAT_SYS))
    -        fQPage |= PGMQPAGE_F_USER_MODE;
    -    int rc = PGMGstQueryPageFast(pVCpu, GCPtrMem, fQPage, &WalkFast);
    +    int rc = PGMGstQueryPageFast(pVCpu, GCPtrMem, iemMemArmAccessToQPage(pVCpu, fAccess), &WalkFast);
         if (RT_SUCCESS(rc))
         {
    @@ -209 +94 @@
             /* If the page is writable and does not have the no-exec bit set, all
                access is allowed.  Otherwise we'll have to check more carefully... */
    +#if 0 /** @todo rewrite to arm */
             Assert(   (WalkFast.fEffective & (X86_PTE_RW | X86_PTE_US | X86_PTE_PAE_NX)) == (X86_PTE_RW | X86_PTE_US)
                    || (   (   !(fAccess & IEM_ACCESS_TYPE_WRITE)
    @@ -227 +113 @@
             /** @todo testcase: check when A and D bits are actually set by the CPU.  */
             Assert(!(~WalkFast.fEffective & (fAccess & IEM_ACCESS_TYPE_WRITE ? X86_PTE_D | X86_PTE_A : X86_PTE_A)));
    +#endif

             *pGCPhysMem = WalkFast.GCPhys;
    @@ -239 +126 @@
     #endif
         *pGCPhysMem = NIL_RTGCPHYS;
    -    return iemRaisePageFault(pVCpu, GCPtrMem, cbAccess, fAccess, rc);
    +    return iemRaiseDataAbortFromWalk(pVCpu, GCPtrMem, cbAccess, fAccess, rc, &WalkFast);
     }

    @@ -252 +139 @@
     {
         /*
    -     * The easy case.
    +     * The typical case.
          */
         if (pVCpu->iem.s.cActiveMappings == 0)
     
    @@ -285 +172 @@
     }
     #endif
    +
    +
    +/**
    + * Converts PGM_PTATTRS_XXX to IEMTLBE_F_XXX.
    + */
    +DECL_FORCE_INLINE(uint64_t) iemMemArmPtAttrsToTlbeFlags(uint64_t fPtAttrs)
    +{
    +    /** @todo stage 2 stuff, IEMTLBE_F_EFF_AMEC, PGM_PTATTRS_NT_SHIFT,
    +     *        PGM_PTATTRS_GP_SHIFT */
    +    AssertCompile(   PGM_PTATTRS_PR_SHIFT + 1 == PGM_PTATTRS_PW_SHIFT
    +                  && PGM_PTATTRS_PR_SHIFT + 2 == PGM_PTATTRS_PX_SHIFT
    +                  && PGM_PTATTRS_PR_SHIFT + 3 == PGM_PTATTRS_PGCS_SHIFT
    +                  && PGM_PTATTRS_PR_SHIFT + 4 == PGM_PTATTRS_UR_SHIFT
    +                  && PGM_PTATTRS_PR_SHIFT + 5 == PGM_PTATTRS_UW_SHIFT
    +                  && PGM_PTATTRS_PR_SHIFT + 6 == PGM_PTATTRS_UX_SHIFT
    +                  && PGM_PTATTRS_PR_SHIFT + 7 == PGM_PTATTRS_UGCS_SHIFT);
    +    AssertCompile(   IEMTLBE_F_EFF_P_NO_READ_BIT + 1 == IEMTLBE_F_EFF_P_NO_WRITE_BIT
    +                  && IEMTLBE_F_EFF_P_NO_READ_BIT + 2 == IEMTLBE_F_EFF_P_NO_EXEC_BIT
    +                  && IEMTLBE_F_EFF_P_NO_READ_BIT + 3 == IEMTLBE_F_EFF_P_NO_GCS_BIT
    +                  && IEMTLBE_F_EFF_P_NO_READ_BIT + 4 == IEMTLBE_F_EFF_U_NO_READ_BIT
    +                  && IEMTLBE_F_EFF_P_NO_READ_BIT + 5 == IEMTLBE_F_EFF_U_NO_WRITE_BIT
    +                  && IEMTLBE_F_EFF_P_NO_READ_BIT + 6 == IEMTLBE_F_EFF_U_NO_EXEC_BIT
    +                  && IEMTLBE_F_EFF_P_NO_READ_BIT + 7 == IEMTLBE_F_EFF_U_NO_GCS_BIT);
    +    AssertCompile(IEMTLBE_F_EFF_P_NO_WRITE_BIT < PGM_PTATTRS_PR_SHIFT);
    +    uint64_t const fInv  = fPtAttrs;
    +    uint64_t       fTlbe = (fInv >> (PGM_PTATTRS_PR_SHIFT - IEMTLBE_F_EFF_P_NO_WRITE_BIT))
    +                         & (  IEMTLBE_F_EFF_P_NO_READ
    +                            | IEMTLBE_F_EFF_P_NO_WRITE
    +                            | IEMTLBE_F_EFF_P_NO_EXEC
    +                            | IEMTLBE_F_EFF_P_NO_GCS
    +                            | IEMTLBE_F_EFF_U_NO_READ
    +                            | IEMTLBE_F_EFF_U_NO_WRITE
    +                            | IEMTLBE_F_EFF_U_NO_EXEC
    +                            | IEMTLBE_F_EFF_U_NO_GCS);
    +
    +    AssertCompile(IEMTLBE_F_EFF_NO_DIRTY_BIT > PGM_PTATTRS_ND_SHIFT);
    +    fTlbe |= (fPtAttrs << (IEMTLBE_F_EFF_NO_DIRTY_BIT - PGM_PTATTRS_ND_SHIFT)) & IEMTLBE_F_EFF_NO_DIRTY;
    +
    +    AssertCompile(PGM_PTATTRS_NS_SHIFT + 1 == PGM_PTATTRS_NSE_SHIFT);
    +    AssertCompile(IEMTLBE_F_S1_NS_BIT + 1  == IEMTLBE_F_S1_NSE_BIT);
    +    AssertCompile(IEMTLBE_F_S1_NS_BIT > PGM_PTATTRS_NS_SHIFT);
    +    fTlbe |= (fPtAttrs << (IEMTLBE_F_S1_NS_BIT - PGM_PTATTRS_NS_SHIFT)) & (IEMTLBE_F_S1_NS | IEMTLBE_F_S1_NSE);
    +
    +    if (fPtAttrs & (PGM_PTATTRS_DEVICE_MASK | PGM_PTATTRS_S2_DEVICE_MASK))
    +        fTlbe |= IEMTLBE_F_EFF_DEVICE;
    +
    +    return fTlbe;
    +}


     
    @@ -307 +242 @@
      *                      8, 12, 16, 32 or 512.  When used by string operations
      *                      it can be up to a page.
    - * @param   iSegReg     The index of the segment register to use for this
    - *                      access.  The base and limits are checked. Use UINT8_MAX
    - *                      to indicate that no segmentation is required (for IDT,
    - *                      GDT and LDT accesses).
      * @param   GCPtrMem    The address of the guest memory.
      * @param   fAccess     How the memory is being accessed.  The
    @@ -325 +256 @@
      *                      Pass zero to skip alignment.
      */
    -VBOXSTRICTRC iemMemMap(PVMCPUCC pVCpu, void **ppvMem, uint8_t *pbUnmapInfo, size_t cbMem, uint8_t iSegReg, RTGCPTR GCPtrMem,
    +VBOXSTRICTRC iemMemMap(PVMCPUCC pVCpu, void **ppvMem, uint8_t *pbUnmapInfo, size_t cbMem, RTGCPTR GCPtrMem,
                            uint32_t fAccess, uint32_t uAlignCtl) RT_NOEXCEPT
     {
    @@ -333 +264 @@
         * Check the input and figure out which mapping entry to use.
         */
    +    Assert(cbMem <= 64);
         Assert(cbMem <= sizeof(pVCpu->iem.s.aBounceBuffers[0]));
    -    Assert(   cbMem <= 64 || cbMem == 512 || cbMem == 256 || cbMem == 108 || cbMem == 104 || cbMem == 102 || cbMem == 94
    -           || (iSegReg == UINT8_MAX && uAlignCtl == 0 && fAccess == IEM_ACCESS_DATA_R /* for the CPUID logging interface */) );
         Assert(!(fAccess & ~(IEM_ACCESS_TYPE_MASK | IEM_ACCESS_WHAT_MASK | IEM_ACCESS_ATOMIC | IEM_ACCESS_PARTIAL_WRITE)));
         Assert(pVCpu->iem.s.cActiveMappings < RT_ELEMENTS(pVCpu->iem.s.aMemMappings));
     
    @@ -355 +285 @@
         * slightly complicated happens, fall back on bounce buffering.
         */
    -    VBOXSTRICTRC rcStrict = iemMemApplySegment(pVCpu, fAccess, iSegReg, cbMem, &GCPtrMem);
    -    if (rcStrict == VINF_SUCCESS)
    +    if ((GCPtrMem & GUEST_MIN_PAGE_OFFSET_MASK) + cbMem <= GUEST_MIN_PAGE_SIZE) /* Crossing a possible page/tlb boundary? */
    +    { /* likely */ }
    +    else if (  (GCPtrMem & IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(pVCpu->iem.s.fExec)) + cbMem
    +             > IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(pVCpu->iem.s.fExec))
    +        return iemMemBounceBufferMapCrossPage(pVCpu, iMemMap, ppvMem, pbUnmapInfo, cbMem, GCPtrMem, fAccess);
    +
    +    /*
    +     * Alignment check.
    +     */
    +    if ( (GCPtrMem & (uAlignCtl & UINT16_MAX)) == 0 )
         { /* likely */ }
         else
    -        return rcStrict;
    -
    -    if ((GCPtrMem & GUEST_PAGE_OFFSET_MASK) + cbMem <= GUEST_PAGE_SIZE) /* Crossing a page boundary? */
    -    { /* likely */ }
    -    else
    -        return iemMemBounceBufferMapCrossPage(pVCpu, iMemMap, ppvMem, pbUnmapInfo, cbMem, GCPtrMem, fAccess);
    -
    -    /*
    -     * Alignment check.
    -     */
    -    if ( (GCPtrMem & (uAlignCtl & UINT16_MAX)) == 0 )
    -    { /* likelyish */ }
    -    else
    -    {
    +    {
    +#if 0 /** @todo ARM: Implement alignment checks as we implement instructions... */
             /* Misaligned access. */
             if ((fAccess & IEM_ACCESS_WHAT_MASK) != IEM_ACCESS_WHAT_SYS)
    @@ -397 +323 @@
                     return iemRaiseGeneralProtectionFault0(pVCpu);
             }
    +#endif

     #if (defined(RT_ARCH_AMD64) && defined(RT_OS_LINUX)) || defined(RT_ARCH_ARM64)
     
    @@ -429 +356 @@
         * should in theory always be set).
         */
    -    uint8_t           *pbMem     = NULL;
    -    uint64_t const     uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
    -    PIEMTLBENTRY       pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    -    uint64_t const     fTlbeAD   = IEMTLBE_F_PT_NO_ACCESSED | (fAccess & IEM_ACCESS_TYPE_WRITE ? IEMTLBE_F_PT_NO_DIRTY : 0);
    +    uint8_t           *pbMem       = NULL;
    +    uint64_t const     uTagNoRev   = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    +    PIEMTLBENTRY       pTlbe       = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    +    bool const         fPrivileged = IEM_F_MODE_ARM_GET_EL(pVCpu->iem.s.fExec) > 0 || (fAccess & IEM_ACCESS_WHAT_SYS);
    +    uint64_t const     fTlbeAcc    = fPrivileged
    +                                   ?   (fAccess & IEM_ACCESS_TYPE_READ  ? IEMTLBE_F_EFF_P_NO_READ : 0)
    +                                     | (fAccess & IEM_ACCESS_TYPE_WRITE ? IEMTLBE_F_EFF_P_NO_WRITE | IEMTLBE_F_EFF_NO_DIRTY : 0)
    +                                   :   (fAccess & IEM_ACCESS_TYPE_READ  ? IEMTLBE_F_EFF_U_NO_READ : 0)
    +                                     | (fAccess & IEM_ACCESS_TYPE_WRITE ? IEMTLBE_F_EFF_U_NO_WRITE | IEMTLBE_F_EFF_NO_DIRTY : 0);
    +    /** @todo  IEMTLBE_F_EFF_U_NO_GCS / IEMTLBE_F_EFF_P_NO_GCS,
    +     *  IEMTLBE_F_S1_NS/NSE, IEMTLBE_F_S2_NO_LIM_WRITE/TL0/TL1. */
    +    /** @todo Make sure the TLB is flushed when changing the page size or
    +     *        somehow deal with that as well here? */
    +    /** @todo If the access incompatible, we currently trigger a PT walk,
    +     *        which isn't necessarily correct... */
         if (   (   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
    -            && !(pTlbe->fFlagsAndPhysRev & fTlbeAD) )
    +            &&    (pTlbe->fFlagsAndPhysRev & (fTlbeAcc | IEMTLBE_F_S1_ASID | IEMTLBE_F_S2_VMID))
    +               == (pVCpu->iem.s.DataTlb.uTlbPhysRev   & (IEMTLBE_F_S1_ASID | IEMTLBE_F_S2_VMID)) )
             || (   (pTlbe = pTlbe + 1)->uTag == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevisionGlobal)
    -            && !(pTlbe->fFlagsAndPhysRev & fTlbeAD) ) )
    +            &&    (pTlbe->fFlagsAndPhysRev & (fTlbeAcc | IEMTLBE_F_S2_VMID))
    +               == (pVCpu->iem.s.DataTlb.uTlbPhysRev   & (IEMTLBE_F_S2_VMID)) ) )
         {
     # ifdef IEM_WITH_TLB_STATISTICS
     
    @@ -442 +382 @@
     # endif

    -        /* If the page is either supervisor only or non-writable, we need to do
    -           more careful access checks. */
    -        if (pTlbe->fFlagsAndPhysRev & (IEMTLBE_F_PT_NO_USER | IEMTLBE_F_PT_NO_WRITE))
    -        {
    -            /* Write to read only memory? */
    -            if (   (pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_WRITE)
    -                && (fAccess & IEM_ACCESS_TYPE_WRITE)
    -                && (   (    IEM_GET_CPL(pVCpu) == 3
    -                        && !(fAccess & IEM_ACCESS_WHAT_SYS))
    -                    || (pVCpu->cpum.GstCtx.cr0 & X86_CR0_WP)))
    -            {
    -                LogEx(LOG_GROUP_IEM, ("iemMemMap: GCPtrMem=%RGv - read-only page -> #PF\n", GCPtrMem));
    -                return iemRaisePageFault(pVCpu, GCPtrMem, (uint32_t)cbMem, fAccess & ~IEM_ACCESS_TYPE_READ, VERR_ACCESS_DENIED);
    -            }
    -
    -            /* Kernel memory accessed by userland? */
    -            if (   (pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_USER)
    -                && IEM_GET_CPL(pVCpu) == 3
    -                && !(fAccess & IEM_ACCESS_WHAT_SYS))
    -            {
    -                LogEx(LOG_GROUP_IEM, ("iemMemMap: GCPtrMem=%RGv - user access to kernel page -> #PF\n", GCPtrMem));
    -                return iemRaisePageFault(pVCpu, GCPtrMem, (uint32_t)cbMem, fAccess, VERR_ACCESS_DENIED);
    -            }
    -        }
    -
             /* Look up the physical page info if necessary. */
    -        if ((pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PHYS_REV) == pVCpu->iem.s.DataTlb.uTlbPhysRev)
    +        if ((pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PHYS_REV) == (pVCpu->iem.s.DataTlb.uTlbPhysRev & IEMTLBE_F_PHYS_REV))
     # ifdef IN_RING3
                 pbMem = pTlbe->pbMappingR3;
     
    @@ -498 +413 @@
             /** @todo testcase: check when A bits are actually set by the CPU for code.  */
             PGMPTWALKFAST WalkFast;
    -        AssertCompile(IEM_ACCESS_TYPE_READ  == PGMQPAGE_F_READ);
    -        AssertCompile(IEM_ACCESS_TYPE_WRITE == PGMQPAGE_F_WRITE);
    -        AssertCompile(IEM_ACCESS_TYPE_EXEC  == PGMQPAGE_F_EXECUTE);
    -        AssertCompile(X86_CR0_WP            == PGMQPAGE_F_CR0_WP0);
    -        uint32_t fQPage = (fAccess & (PGMQPAGE_F_READ | IEM_ACCESS_TYPE_WRITE | PGMQPAGE_F_EXECUTE))
    -                        | (((uint32_t)pVCpu->cpum.GstCtx.cr0 & X86_CR0_WP) ^ X86_CR0_WP);
    -        if (IEM_GET_CPL(pVCpu) == 3 && !(fAccess & IEM_ACCESS_WHAT_SYS))
    -            fQPage |= PGMQPAGE_F_USER_MODE;
    -        int rc = PGMGstQueryPageFast(pVCpu, GCPtrMem, fQPage, &WalkFast);
    +        int rc = PGMGstQueryPageFast(pVCpu, GCPtrMem, iemMemArmAccessToQPage(pVCpu, fAccess), &WalkFast);
             if (RT_SUCCESS(rc))
                 Assert((WalkFast.fInfo & PGM_WALKINFO_SUCCEEDED) && WalkFast.fFailed == PGM_WALKFAIL_SUCCESS);
    @@ -512 +419 @@
             {
                 LogEx(LOG_GROUP_IEM, ("iemMemMap: GCPtrMem=%RGv - failed to fetch page -> #PF\n", GCPtrMem));
    -# ifdef VBOX_WITH_NESTED_HWVIRT_VMX_EPT
    -            if (WalkFast.fFailed & PGM_WALKFAIL_EPT)
    -                IEM_VMX_VMEXIT_EPT_RET(pVCpu, &WalkFast, fAccess, IEM_SLAT_FAIL_LINEAR_TO_PHYS_ADDR, 0 /* cbInstr */);
    -# endif
    -            return iemRaisePageFault(pVCpu, GCPtrMem, (uint32_t)cbMem, fAccess, rc);
    +            /** @todo stage 2 exceptions. */
    +            return iemRaiseDataAbortFromWalk(pVCpu, GCPtrMem, (uint32_t)cbMem, fAccess, rc, &WalkFast);
             }

     
    @@ -523 +427 @@
                || RT_LIKELY(!(fDataBps = iemMemCheckDataBreakpoint(pVCpu->CTX_SUFF(pVM), pVCpu, GCPtrMem, cbMem, fAccess))))
             {
    -            if (   !(WalkFast.fEffective & PGM_PTATTRS_G_MASK)
    -                || IEM_GET_CPL(pVCpu) != 0) /* optimization: Only use the PTE.G=1 entries in ring-0. */
    +            /** @todo arm: check out global pages on arm */
    +            if (   (WalkFast.fEffective & PGM_PTATTRS_NG_MASK)
    +                || !fPrivileged) /* optimization: Only use global pages privileged accesses. */
                 {
                     pTlbe--;
    -                pTlbe->uTag         = uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision;
    -                if (WalkFast.fInfo & PGM_WALKINFO_BIG_PAGE)
    -                    iemTlbLoadedLargePage<false>(pVCpu, &pVCpu->iem.s.DataTlb, uTagNoRev, RT_BOOL(pVCpu->cpum.GstCtx.cr4 & X86_CR4_PAE));
    +                Assert(!IEMTLBE_IS_GLOBAL(pTlbe));
    +                pTlbe->uTag = uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision;
    +                /// @todo arm: large/giant page fun
    +                //if (WalkFast.fInfo & PGM_WALKINFO_BIG_PAGE)
    +                //    iemTlbLoadedLargePage<false>(pVCpu, &pVCpu->iem.s.DataTlb, uTagNoRev, RT_BOOL(pVCpu->cpum.GstCtx.cr4 & X86_CR4_PAE));
     # ifdef IEMTLB_WITH_LARGE_PAGE_BITMAP
    -                else
    +                //else
                         ASMBitClear(pVCpu->iem.s.DataTlb.bmLargePage, IEMTLB_TAG_TO_EVEN_INDEX(uTagNoRev));
     # endif
    @@ -537 +444 @@
                 else
                 {
    +                Assert(IEMTLBE_IS_GLOBAL(pTlbe));
                     pVCpu->iem.s.DataTlb.cTlbCoreGlobalLoads++;
    -                pTlbe->uTag         = uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevisionGlobal;
    -                if (WalkFast.fInfo & PGM_WALKINFO_BIG_PAGE)
    -                    iemTlbLoadedLargePage<true>(pVCpu, &pVCpu->iem.s.DataTlb, uTagNoRev, RT_BOOL(pVCpu->cpum.GstCtx.cr4 & X86_CR4_PAE));
    +                pTlbe->uTag = uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevisionGlobal;
    +                /// @todo arm: large/giant page fun
    +                //if (WalkFast.fInfo & PGM_WALKINFO_BIG_PAGE)
    +                //    iemTlbLoadedLargePage<true>(pVCpu, &pVCpu->iem.s.DataTlb, uTagNoRev, RT_BOOL(pVCpu->cpum.GstCtx.cr4 & X86_CR4_PAE));
     # ifdef IEMTLB_WITH_LARGE_PAGE_BITMAP
    -                else
    +                //else
                         ASMBitClear(pVCpu->iem.s.DataTlb.bmLargePage, IEMTLB_TAG_TO_EVEN_INDEX(uTagNoRev) + 1);
     # endif
                 }
             }
    -        else
    -        {
    -            /* If we hit a data breakpoint, we use a dummy TLBE to force all accesses
    -               to the page with the data access breakpoint armed on it to pass thru here. */
    -            if (fDataBps > 1)
    -                LogEx(LOG_GROUP_IEM, ("iemMemMap: Data breakpoint: fDataBps=%#x for %RGv LB %zx; fAccess=%#x cs:rip=%04x:%08RX64\n",
    -                                      fDataBps, GCPtrMem, cbMem, fAccess, pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip));
    -            pVCpu->cpum.GstCtx.eflags.uBoth |= fDataBps & (CPUMCTX_DBG_HIT_DRX_MASK | CPUMCTX_DBG_DBGF_MASK);
    +        else if (fDataBps == 1)
    +        {
    +            /* There is one or more data breakpionts in the current page, so we use a dummy
    +               TLBE to force all accesses to the page with the data access breakpoint armed
    +               on it to pass thru here. */
                 pTlbe = &pVCpu->iem.s.DataBreakpointTlbe;
                 pTlbe->uTag = uTagNoRev;
             }
    -        pTlbe->fFlagsAndPhysRev = (~WalkFast.fEffective & (X86_PTE_US | X86_PTE_RW | X86_PTE_D | X86_PTE_A) /* skipping NX */)
    -                                | (WalkFast.fInfo & PGM_WALKINFO_BIG_PAGE);
    -        RTGCPHYS const GCPhysPg = WalkFast.GCPhys & ~(RTGCPHYS)GUEST_PAGE_OFFSET_MASK;
    +        else
    +        {
    +            LogEx(LOG_GROUP_IEM, ("iemMemMap: Data breakpoint: fDataBps=%#x for %RGv LB %zx; fAccess=%#x PC=%016RX64\n",
    +                                  fDataBps, GCPtrMem, cbMem, fAccess, pVCpu->cpum.GstCtx.Pc.u64));
    +            return iemRaiseDebugDataAccessOrInvokeDbgf(pVCpu, fDataBps, GCPtrMem, cbMem, fAccess);
    +        }
    +        pTlbe->fFlagsAndPhysRev = iemMemArmPtAttrsToTlbeFlags(WalkFast.fEffective)
    +                                | (  pVCpu->iem.s.DataTlb.uTlbPhysRev
    +                                   & (!IEMTLBE_IS_GLOBAL(pTlbe) ? IEMTLBE_F_S2_VMID | IEMTLBE_F_S1_ASID : IEMTLBE_F_S2_VMID) );
    +
    +        /** @todo PGM_WALKINFO_BIG_PAGE++   */
    +        RTGCPHYS const GCPhysPg = WalkFast.GCPhys & ~(RTGCPHYS)IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(pVCpu->iem.s.fExec);
             pTlbe->GCPhys           = GCPhysPg;
             pTlbe->pbMappingR3      = NULL;
    -        Assert(!(pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_ACCESSED));
    -        Assert(!(pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_DIRTY) || !(fAccess & IEM_ACCESS_TYPE_WRITE));
    -        Assert(   !(pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_WRITE)
    -               || !(fAccess & IEM_ACCESS_TYPE_WRITE)
    -               || (fQPage & (PGMQPAGE_F_CR0_WP0 | PGMQPAGE_F_USER_MODE)) == PGMQPAGE_F_CR0_WP0);
    -        Assert(   !(pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PT_NO_USER)
    -               || IEM_GET_CPL(pVCpu) != 3
    -               || (fAccess & IEM_ACCESS_WHAT_SYS));
    +        Assert(!(pTlbe->fFlagsAndPhysRev & IEMTLBE_F_EFF_NO_DIRTY) || !(fAccess & IEM_ACCESS_TYPE_WRITE));

             if (pTlbe != &pVCpu->iem.s.DataBreakpointTlbe)
             {
    -            if (!((uintptr_t)pTlbe & (sizeof(*pTlbe) * 2 - 1)))
    +            if (!IEMTLBE_IS_GLOBAL(pTlbe))
                     IEMTLBTRACE_LOAD(       pVCpu, GCPtrMem, pTlbe->GCPhys, (uint32_t)pTlbe->fFlagsAndPhysRev, true);
                 else
     
    @@ -599 +507 @@
         else
             return iemMemBounceBufferMapPhys(pVCpu, iMemMap, ppvMem, pbUnmapInfo, cbMem,
    -                                         pTlbe->GCPhys | (GCPtrMem & GUEST_PAGE_OFFSET_MASK), fAccess,
    +                                         pTlbe->GCPhys | (GCPtrMem & IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(pVCpu->iem.s.fExec)),
    +                                         fAccess,
                                                pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PG_UNASSIGNED ? VERR_PGM_PHYS_TLB_UNASSIGNED
                                              : pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PG_NO_READ    ? VERR_PGM_PHYS_TLB_CATCH_ALL
     
    @@ -607 +516 @@
         if (pbMem)
         {
    -        Assert(!((uintptr_t)pbMem & GUEST_PAGE_OFFSET_MASK));
    -        pbMem    = pbMem + (GCPtrMem & GUEST_PAGE_OFFSET_MASK);
    +        Assert(!((uintptr_t)pbMem & IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(pVCpu->iem.s.fExec)));
    +        pbMem    = pbMem + (GCPtrMem & IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(pVCpu->iem.s.fExec));
             fAccess |= IEM_ACCESS_NOT_LOCKED;
         }
     
    @@ -614 +523 @@
         {
             Assert(!(fAccess & IEM_ACCESS_NOT_LOCKED));
    -        RTGCPHYS const GCPhysFirst = pTlbe->GCPhys | (GCPtrMem & GUEST_PAGE_OFFSET_MASK);
    -        rcStrict = iemMemPageMap(pVCpu, GCPhysFirst, fAccess, (void **)&pbMem, &pVCpu->iem.s.aMemMappingLocks[iMemMap].Lock);
    +        RTGCPHYS const GCPhysFirst = pTlbe->GCPhys | (GCPtrMem & IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(pVCpu->iem.s.fExec));
    +        VBOXSTRICTRC rcStrict = iemMemPageMap(pVCpu, GCPhysFirst, fAccess, (void **)&pbMem, &pVCpu->iem.s.aMemMappingLocks[iMemMap].Lock);
             if (rcStrict != VINF_SUCCESS)
                 return iemMemBounceBufferMapPhys(pVCpu, iMemMap, ppvMem, pbUnmapInfo, cbMem, GCPhysFirst, fAccess, rcStrict);
     
    @@ -623 +532 @@

         if (fAccess & IEM_ACCESS_TYPE_WRITE)
    -        Log6(("IEM WR %RGv (%RGp) LB %#zx\n", GCPtrMem, pTlbe->GCPhys | (GCPtrMem & GUEST_PAGE_OFFSET_MASK), cbMem));
    +        Log6(("IEM WR %RGv (%RGp) LB %#zx\n", GCPtrMem, pTlbe->GCPhys | (GCPtrMem & IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(pVCpu->iem.s.fExec)), cbMem));
         if (fAccess & IEM_ACCESS_TYPE_READ)
    -        Log2(("IEM RD %RGv (%RGp) LB %#zx\n", GCPtrMem, pTlbe->GCPhys | (GCPtrMem & GUEST_PAGE_OFFSET_MASK), cbMem));
    +        Log2(("IEM RD %RGv (%RGp) LB %#zx\n", GCPtrMem, pTlbe->GCPhys | (GCPtrMem & IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(pVCpu->iem.s.fExec)), cbMem));

     #else  /* !IEM_WITH_DATA_TLB */

         RTGCPHYS GCPhysFirst;
    -    rcStrict = iemMemPageTranslateAndCheckAccess(pVCpu, GCPtrMem, (uint32_t)cbMem, fAccess, &GCPhysFirst);
    +    VBOXSTRICTRC rcStrict = iemMemPageTranslateAndCheckAccess(pVCpu, GCPtrMem, (uint32_t)cbMem, fAccess, &GCPhysFirst);
         if (rcStrict != VINF_SUCCESS)
             return rcStrict;
     
    @@ -708 +617 @@
      */
     template<bool a_fSafeCall = false>
    -static void *iemMemMapJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, size_t cbMem, uint8_t iSegReg, RTGCPTR GCPtrMem,
    +static void *iemMemMapJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, size_t cbMem, RTGCPTR GCPtrMem,
                               uint32_t fAccess, uint32_t uAlignCtl) IEM_NOEXCEPT_MAY_LONGJMP
     {
         STAM_COUNTER_INC(&pVCpu->iem.s.StatMemMapJmp);
    +#if 1 /** @todo redo this according when iemMemMap() has been fully debugged. */
    +    void        *pvMem    = NULL;
    +    VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, &pvMem, pbUnmapInfo, cbMem, GCPtrMem, fAccess, uAlignCtl);
    +    if (rcStrict == VINF_SUCCESS)
    +    { /* likely */ }
    +    else
    +        IEM_DO_LONGJMP(pVCpu, VBOXSTRICTRC_VAL(rcStrict));
    +    return pvMem;
    +
    +#else /* later */

         /*
     
    @@ -823 +742 @@
                                             : 0;
         uint64_t const     fNoRead          = fAccess & IEM_ACCESS_TYPE_READ ? IEMTLBE_F_PG_NO_READ : 0;
    -    uint64_t const     uTagNoRev        = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
    +    uint64_t const     uTagNoRev        = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
         PIEMTLBENTRY       pTlbe            = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
         uint64_t const     fTlbeAD          = IEMTLBE_F_PT_NO_ACCESSED | (fNoWriteNoDirty & IEMTLBE_F_PT_NO_DIRTY);
     
    @@ -925 +844 @@
         if (pTlbe != &pVCpu->iem.s.DataBreakpointTlbe)
         {
    -        if (!((uintptr_t)pTlbe & (sizeof(*pTlbe) * 2 - 1)))
    +        if (!IEMTLBE_IS_GLOBAL(pTlbe))
                 IEMTLBTRACE_LOAD(       pVCpu, GCPtrMem, pTlbe->GCPhys, (uint32_t)pTlbe->fFlagsAndPhysRev, true);
             else
     
    @@ -1091 +1010 @@
         *pbUnmapInfo = iMemMap | 0x08 | ((fAccess & IEM_ACCESS_TYPE_MASK) << 4);
         return pvMem;
    +#endif /* later */
     }


     /** @see iemMemMapJmp */
    -static void *iemMemMapSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, size_t cbMem, uint8_t iSegReg, RTGCPTR GCPtrMem,
    +static void *iemMemMapSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, size_t cbMem, RTGCPTR GCPtrMem,
                                   uint32_t fAccess, uint32_t uAlignCtl) IEM_NOEXCEPT_MAY_LONGJMP
     {
    -    return iemMemMapJmp<true /*a_fSafeCall*/>(pVCpu, pbUnmapInfo, cbMem, iSegReg, GCPtrMem, fAccess, uAlignCtl);
    -}
    -
    -
    +    return iemMemMapJmp<true /*a_fSafeCall*/>(pVCpu, pbUnmapInfo, cbMem, GCPtrMem, fAccess, uAlignCtl);
    +}
    +
    +
    +#if 0 /** @todo ARM: more memory stuff... */

     /*
     
    @@ -1872 +1793 @@
     /** @} */

    +#endif /* work in progress */
    +
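
Note: the AssertCompile ladder in the new iemMemArmPtAttrsToTlbeFlags pins down the one property the conversion relies on: the eight PGM_PTATTRS_* permission bits and the eight IEMTLBE_F_EFF_*_NO_* bits keep the same relative order, so the whole group is translated with a single shift and mask. A toy model of the pattern, with invented bit positions:

    #include <cstdint>

    /* Toy model: source flags in bits 20..27, destination flags in bits 4..11,
       identical internal ordering (all positions invented for illustration). */
    static uint64_t sketchTranslateFlagGroup(uint64_t fSrc)
    {
        return (fSrc >> (20 - 4)) & (UINT64_C(0xff) << 4); /* one shift, one mask */
    }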
  • trunk/src/VBox/VMM/VMMAll/target-armv8/IEMAllTlbInline-armv8.h

    r108590 → r108791
    @@ -55 +55 @@
     # endif

    -    AssertCompile(IEMTLB_CALC_TAG_NO_REV((RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
    +    AssertCompile(IEMTLB_CALC_TAG_NO_REV(pVCpu, (RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
         uint32_t const                 fMask = (f2MbLargePages ? _2M - 1U : _4M - 1U) >> GUEST_PAGE_SHIFT;
         IEMTLB::LARGEPAGERANGE * const pRange = a_fGlobal
    @@ -165 +165 @@
          * We make ASSUMPTIONS about IEMTLB_CALC_TAG_NO_REV here.
          */
    -    AssertCompile(IEMTLB_CALC_TAG_NO_REV((RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
    +    AssertCompile(IEMTLB_CALC_TAG_NO_REV(p V C p u - not true, (RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
         if (   !a_fDataTlb
             && GCPtrInstrBufPcTag - GCPtrTag < (a_f2MbLargePage ? 512U : 1024U))
    @@ -392 +392 @@
                                                      RTGCPTR GCPtrInstrBufPcTag) RT_NOEXCEPT
     {
    -    AssertCompile(IEMTLB_CALC_TAG_NO_REV((RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
    +    AssertCompile(IEMTLB_CALC_TAG_NO_REV(p V C p u - not true, (RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);

         GCPtrTag &= ~(RTGCPTR)(RT_BIT_64((a_f2MbLargePage ? 21 : 22) - GUEST_PAGE_SHIFT) - 1U);
    @@ -428 +428 @@
             IEMTLBTRACE_EVICT_SLOT(pVCpu, GCPtrTag, pTlb->aEntries[idxEven].GCPhys, idxEven, a_fDataTlb);
             pTlb->aEntries[idxEven].uTag = 0;
    -        if (!a_fDataTlb && GCPtrTag == IEMTLB_CALC_TAG_NO_REV(pVCpu->iem.s.uInstrBufPc))
    +        if (!a_fDataTlb && GCPtrTag == IEMTLB_CALC_TAG_NO_REV(pVCpu, pVCpu->iem.s.uInstrBufPc))
                 pVCpu->iem.s.cbInstrBufTotal = 0;
         }
    @@ -435 +435 @@
             IEMTLBTRACE_EVICT_SLOT(pVCpu, GCPtrTag, pTlb->aEntries[idxEven + 1].GCPhys, idxEven + 1, a_fDataTlb);
             pTlb->aEntries[idxEven + 1].uTag = 0;
    -        if (!a_fDataTlb && GCPtrTag == IEMTLB_CALC_TAG_NO_REV(pVCpu->iem.s.uInstrBufPc))
    +        if (!a_fDataTlb && GCPtrTag == IEMTLB_CALC_TAG_NO_REV(pVCpu, pVCpu->iem.s.uInstrBufPc))
                 pVCpu->iem.s.cbInstrBufTotal = 0;
         }
    @@ -451 +451 @@
     # endif
         {
    -        RTGCPTR const GCPtrInstrBufPcTag = a_fDataTlb ? 0 : IEMTLB_CALC_TAG_NO_REV(pVCpu->iem.s.uInstrBufPc);
    +        RTGCPTR const GCPtrInstrBufPcTag = a_fDataTlb ? 0 : IEMTLB_CALC_TAG_NO_REV(pVCpu, pVCpu->iem.s.uInstrBufPc);
             if (pVCpu->cpum.GstCtx.cr4 & X86_CR4_PAE)
                 iemTlbInvalidateLargePageWorker<a_fDataTlb, true>(pVCpu, pTlb, GCPtrTag, GCPtrInstrBufPcTag);
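
Note: the odd-looking "p V C p u - not true" arguments appear to be deliberate placeholders marking assertions that cannot survive the new pVCpu-parameterised macro: once the tag shift depends on the vCPU's stage 1 granule, tag == address >> GUEST_PAGE_SHIFT stops being a compile-time fact. A worked example, assuming the tag is simply the address shifted right by the granule shift:

    #include <cstdint>

    /* 4K granule (shift 12):  0x8731000 >> 12 == 0x8731 -> the old assertion holds.
       16K granule (shift 14): 0x8731000 >> 14 == 0x21cc -> same address, different
       tag, so the check only makes sense against a concrete per-vCPU shift. */
    static_assert((UINT64_C(0x8731000) >> 12) == 0x8731, "4K granule");
    static_assert((UINT64_C(0x8731000) >> 14) == 0x21cc, "16K granule");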
  • trunk/src/VBox/VMM/VMMAll/target-armv8/IEMInline-armv8.h

    r108702 → r108791
    @@ -46 +46 @@
     DECL_FORCE_INLINE(uint32_t) iemCalcExecAcFlag(PVMCPUCC pVCpu, uint32_t fExecMode) RT_NOEXCEPT
     {
    -    IEM_CTX_ASSERT(pVCpu, CPUMCTX_EXTRN_SCTLR_TCR_TTBR | CPUMCTX_EXTRN_PSTATE);
    +    IEM_CTX_ASSERT(pVCpu, CPUMCTX_EXTRN_SCTLR_TCR_TTBR | CPUMCTX_EXTRN_SYSREG_EL2 | CPUMCTX_EXTRN_PSTATE);
         uint64_t const fSctlr =    IEM_F_MODE_ARM_GET_EL(fExecMode) == 1
                                 || (   IEM_F_MODE_ARM_GET_EL(fExecMode) == 0
    @@ -63 +63 @@

     /**
    + * Calculates the stage 1 page size.
    + *
    + * @returns IEM_F_ARM_S1_PAGE_MASK
    + * @param   pVCpu               The cross context virtual CPU structure of the
    + *                              calling thread.
    + * @param   fExecMode           The mode part of fExec.
    + */
    +DECL_FORCE_INLINE(uint32_t) iemCalcExecStage1PageSize(PVMCPUCC pVCpu, uint32_t fExecMode) RT_NOEXCEPT
    +{
    +    IEM_CTX_ASSERT(pVCpu, CPUMCTX_EXTRN_SCTLR_TCR_TTBR | CPUMCTX_EXTRN_PSTATE);
    +    uint64_t const fSCtlR = IEM_F_MODE_ARM_GET_EL(fExecMode) <= 1 ? pVCpu->cpum.GstCtx.Sctlr.u64
    +#if 1 /** @todo armv8: EL3 & check up EL2 logic. */
    +                          :                                         pVCpu->cpum.GstCtx.SctlrEl2.u64;
    +#else
    +                          : IEM_F_MODE_ARM_GET_EL(fExecMode) == 2 ? pVCpu->cpum.GstCtx.SctlrEl2.u64
    +                          :                                         pVCpu->cpum.GstCtx.SctlrEl3.u64;
    +#endif
    +    if (fSCtlR & ARMV8_SCTLR_EL1_M)
    +    {
    +        uint64_t const fTcr = IEM_F_MODE_ARM_GET_EL(fExecMode) <= 1 ? pVCpu->cpum.GstCtx.Tcr.u64
    +#if 1 /** @todo armv8: EL3 & check up EL2 logic. */
    +                            :                                         pVCpu->cpum.GstCtx.TcrEl2.u64;
    +#else
    +                        : IEM_F_MODE_ARM_GET_EL(fExecMode) == 2 ? pVCpu->cpum.GstCtx.TcrEl2.u64
    +                        :                                         pVCpu->cpum.GstCtx.TcrEl3.u64;
    +#endif
    +        switch (fTcr & ARMV8_TCR_EL1_AARCH64_TG0_MASK)
    +        {
    +            case ARMV8_TCR_EL1_AARCH64_TG0_4KB  << ARMV8_TCR_EL1_AARCH64_TG0_SHIFT:
    +                return IEM_F_ARM_S1_PAGE_4K;
    +            case ARMV8_TCR_EL1_AARCH64_TG0_16KB << ARMV8_TCR_EL1_AARCH64_TG0_SHIFT:
    +                return IEM_F_ARM_S1_PAGE_16K;
    +            case ARMV8_TCR_EL1_AARCH64_TG0_64KB << ARMV8_TCR_EL1_AARCH64_TG0_SHIFT:
    +                return IEM_F_ARM_S1_PAGE_64K;
    +            default:
    +                AssertFailed();
    +                return IEM_F_ARM_S1_PAGE_4K;
    +        }
    +    }
    +    /* MMU is not enabled, use 4KB TLB entries for now. */
    +    /** @todo check out 64KB for non-MMU mode. */
    +    return IEM_F_ARM_S1_PAGE_4K; /** @todo Do we need a NO_MMU flag? */
    +}
    +
    +
    +/**
      * Calculates the IEM_F_MODE_XXX, IEM_F_ARM_A, IEM_F_ARM_AA and IEM_F_ARM_SP_IDX
    @@ -69 +115 @@
      *                              calling thread.
      */
    -DECL_FORCE_INLINE(uint32_t) iemCalcExecModeAndSpIdxAndAcFlags(PVMCPUCC pVCpu) RT_NOEXCEPT
    +DECL_FORCE_INLINE(uint32_t) iemCalcExecModeAndSpIdxAndAcFlagsAndS1PgSize(PVMCPUCC pVCpu) RT_NOEXCEPT
     {
         IEM_CTX_ASSERT(pVCpu, CPUMCTX_EXTRN_PSTATE);
    @@ -122 +168 @@
         /* Alignment checks: */
         fExec |= iemCalcExecAcFlag(pVCpu, fExec);
    +
    +    /* Page size. */
    +    fExec |= iemCalcExecStage1PageSize(pVCpu, fExec);
         return fExec;
     }
    @@ -153 +202 @@
     DECL_FORCE_INLINE(uint32_t) iemCalcExecFlags(PVMCPUCC pVCpu) RT_NOEXCEPT
     {
    -    return iemCalcExecModeAndSpIdxAndAcFlags(pVCpu)
    +    return iemCalcExecModeAndSpIdxAndAcFlagsAndS1PgSize(pVCpu)
              | iemCalcExecDbgFlags(pVCpu)
              ;
    @@ -169 +218 @@
     {
         pVCpu->iem.s.fExec = (pVCpu->iem.s.fExec & ~(IEM_F_MODE_MASK | IEM_F_ARM_A | IEM_F_ARM_AA))
    -                       | iemCalcExecModeAndSpIdxAndAcFlags(pVCpu);
    +                       | iemCalcExecModeAndSpIdxAndAcFlagsAndS1PgSize(pVCpu);
     }

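
Note: the new iemCalcExecStage1PageSize caches the stage 1 granule in fExec by decoding TCR_ELx.TG0 when the MMU is enabled (SCTLR_ELx.M set). The TG0 encoding is not in size order: 0b00 = 4KB, 0b01 = 64KB, 0b10 = 16KB. A stand-alone decoder over the same field, for illustration (names invented):

    #include <cstdint>

    /* TCR_ELx.TG0 (bits 15:14) granule encoding, per the Arm ARM. */
    static unsigned sketchGetS1PageShiftFromTcr(uint64_t uTcr)
    {
        switch ((uTcr >> 14) & 3)
        {
            case 0:  return 12; /* 4KB  */
            case 2:  return 14; /* 16KB */
            case 1:  return 16; /* 64KB */
            default: return 12; /* reserved; fall back to 4KB */
        }
    }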
  • trunk/src/VBox/VMM/VMMAll/target-armv8/IEMInternal-armv8.h

    r108734 r108791  
    11/* $Id$ */
    22/** @file
    3  * IEM - Internal header file, x86 target specifics.
     3 * IEM - Internal header file, ARMv8 target specifics.
    44 */
    55
    66/*
    7  * Copyright (C) 2011-2024 Oracle and/or its affiliates.
     7 * Copyright (C) 2011-2025 Oracle and/or its affiliates.
    88 *
    99 * This file is part of VirtualBox base platform packages, as
     
    2626 */
    2727
    28 #ifndef VMM_INCLUDED_SRC_VMMAll_target_x86_IEMInternal_x86_h
    29 #define VMM_INCLUDED_SRC_VMMAll_target_x86_IEMInternal_x86_h
     28#ifndef VMM_INCLUDED_SRC_VMMAll_target_armv8_IEMInternal_armv8_h
     29#define VMM_INCLUDED_SRC_VMMAll_target_armv8_IEMInternal_armv8_h
    3030#ifndef RT_WITHOUT_PRAGMA_ONCE
    3131# pragma once
     
    3636
    3737
    38 /** @defgroup grp_iem_int_x86   X86 Target Internals
     38/** @defgroup grp_iem_int_arm   ARM Target Internals
    3939 * @ingroup grp_iem_int
    4040 * @internal
     
    4545
    4646
    47 /** @name Prefix constants (IEMCPU::fPrefixes)
    48  * @note x86 specific
    49  * @{ */
    50 #define IEM_OP_PRF_SEG_CS               RT_BIT_32(0)  /**< CS segment prefix (0x2e). */
    51 #define IEM_OP_PRF_SEG_SS               RT_BIT_32(1)  /**< SS segment prefix (0x36). */
    52 #define IEM_OP_PRF_SEG_DS               RT_BIT_32(2)  /**< DS segment prefix (0x3e). */
    53 #define IEM_OP_PRF_SEG_ES               RT_BIT_32(3)  /**< ES segment prefix (0x26). */
    54 #define IEM_OP_PRF_SEG_FS               RT_BIT_32(4)  /**< FS segment prefix (0x64). */
    55 #define IEM_OP_PRF_SEG_GS               RT_BIT_32(5)  /**< GS segment prefix (0x65). */
    56 #define IEM_OP_PRF_SEG_MASK             UINT32_C(0x3f)
    57 
    58 #define IEM_OP_PRF_SIZE_OP              RT_BIT_32(8)  /**< Operand size prefix (0x66). */
    59 #define IEM_OP_PRF_SIZE_REX_W           RT_BIT_32(9)  /**< REX.W prefix (0x48-0x4f). */
    60 #define IEM_OP_PRF_SIZE_ADDR            RT_BIT_32(10) /**< Address size prefix (0x67). */
    61 
    62 #define IEM_OP_PRF_LOCK                 RT_BIT_32(16) /**< Lock prefix (0xf0). */
    63 #define IEM_OP_PRF_REPNZ                RT_BIT_32(17) /**< Repeat-not-zero prefix (0xf2). */
    64 #define IEM_OP_PRF_REPZ                 RT_BIT_32(18) /**< Repeat-if-zero prefix (0xf3). */
    65 
    66 #define IEM_OP_PRF_REX                  RT_BIT_32(24) /**< Any REX prefix (0x40-0x4f). */
    67 #define IEM_OP_PRF_REX_B                RT_BIT_32(25) /**< REX.B prefix (0x41,0x43,0x45,0x47,0x49,0x4b,0x4d,0x4f). */
    68 #define IEM_OP_PRF_REX_X                RT_BIT_32(26) /**< REX.X prefix (0x42,0x43,0x46,0x47,0x4a,0x4b,0x4e,0x4f). */
    69 #define IEM_OP_PRF_REX_R                RT_BIT_32(27) /**< REX.R prefix (0x44,0x45,0x46,0x47,0x4c,0x4d,0x4e,0x4f). */
    70 /** Mask with all the REX prefix flags.
    71  * This is generally for use when needing to undo the REX prefixes when they
    72  * are followed legacy prefixes and therefore does not immediately preceed
    73  * the first opcode byte.
    74  * For testing whether any REX prefix is present, use  IEM_OP_PRF_REX instead. */
    75 #define IEM_OP_PRF_REX_MASK  (IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W )
    76 
    77 #define IEM_OP_PRF_VEX                  RT_BIT_32(28) /**< Indiciates VEX prefix. */
    78 #define IEM_OP_PRF_EVEX                 RT_BIT_32(29) /**< Indiciates EVEX prefix. */
    79 #define IEM_OP_PRF_XOP                  RT_BIT_32(30) /**< Indiciates XOP prefix. */
    80 /** @} */
    81 
    82 /** @name IEMOPFORM_XXX - Opcode forms
    83  * @note These are ORed together with IEMOPHINT_XXX.
    84  * @note x86 specific
    85  * @{ */
    86 /** ModR/M: reg, r/m */
    87 #define IEMOPFORM_RM            0
    88 /** ModR/M: reg, r/m (register) */
    89 #define IEMOPFORM_RM_REG        (IEMOPFORM_RM | IEMOPFORM_MOD3)
    90 /** ModR/M: reg, r/m (memory)   */
    91 #define IEMOPFORM_RM_MEM        (IEMOPFORM_RM | IEMOPFORM_NOT_MOD3)
    92 /** ModR/M: reg, r/m, imm */
    93 #define IEMOPFORM_RMI           1
    94 /** ModR/M: reg, r/m (register), imm */
    95 #define IEMOPFORM_RMI_REG       (IEMOPFORM_RMI | IEMOPFORM_MOD3)
    96 /** ModR/M: reg, r/m (memory), imm   */
    97 #define IEMOPFORM_RMI_MEM       (IEMOPFORM_RMI | IEMOPFORM_NOT_MOD3)
    98 /** ModR/M: reg, r/m, xmm0 */
    99 #define IEMOPFORM_RM0           2
    100 /** ModR/M: reg, r/m (register), xmm0 */
    101 #define IEMOPFORM_RM0_REG       (IEMOPFORM_RM0 | IEMOPFORM_MOD3)
    102 /** ModR/M: reg, r/m (memory), xmm0   */
    103 #define IEMOPFORM_RM0_MEM       (IEMOPFORM_RM0 | IEMOPFORM_NOT_MOD3)
    104 /** ModR/M: r/m, reg */
    105 #define IEMOPFORM_MR            3
    106 /** ModR/M: r/m (register), reg */
    107 #define IEMOPFORM_MR_REG        (IEMOPFORM_MR | IEMOPFORM_MOD3)
    108 /** ModR/M: r/m (memory), reg */
    109 #define IEMOPFORM_MR_MEM        (IEMOPFORM_MR | IEMOPFORM_NOT_MOD3)
    110 /** ModR/M: r/m, reg, imm */
    111 #define IEMOPFORM_MRI           4
    112 /** ModR/M: r/m (register), reg, imm */
    113 #define IEMOPFORM_MRI_REG       (IEMOPFORM_MRI | IEMOPFORM_MOD3)
    114 /** ModR/M: r/m (memory), reg, imm */
    115 #define IEMOPFORM_MRI_MEM       (IEMOPFORM_MRI | IEMOPFORM_NOT_MOD3)
    116 /** ModR/M: r/m only */
    117 #define IEMOPFORM_M             5
    118 /** ModR/M: r/m only (register). */
    119 #define IEMOPFORM_M_REG         (IEMOPFORM_M | IEMOPFORM_MOD3)
    120 /** ModR/M: r/m only (memory). */
    121 #define IEMOPFORM_M_MEM         (IEMOPFORM_M | IEMOPFORM_NOT_MOD3)
    122 /** ModR/M: r/m, imm */
    123 #define IEMOPFORM_MI            6
    124 /** ModR/M: r/m (register), imm */
    125 #define IEMOPFORM_MI_REG        (IEMOPFORM_MI | IEMOPFORM_MOD3)
    126 /** ModR/M: r/m (memory), imm */
    127 #define IEMOPFORM_MI_MEM        (IEMOPFORM_MI | IEMOPFORM_NOT_MOD3)
    128 /** ModR/M: r/m, 1  (shift and rotate instructions) */
    129 #define IEMOPFORM_M1            7
    130 /** ModR/M: r/m (register), 1. */
    131 #define IEMOPFORM_M1_REG        (IEMOPFORM_M1 | IEMOPFORM_MOD3)
    132 /** ModR/M: r/m (memory), 1. */
    133 #define IEMOPFORM_M1_MEM        (IEMOPFORM_M1 | IEMOPFORM_NOT_MOD3)
    134 /** ModR/M: r/m, CL  (shift and rotate instructions)
    135  * @todo This should just've been a generic fixed register. But the python
    136  *       code doesn't needs more convincing. */
    137 #define IEMOPFORM_M_CL          8
    138 /** ModR/M: r/m (register), CL. */
    139 #define IEMOPFORM_M_CL_REG      (IEMOPFORM_M_CL | IEMOPFORM_MOD3)
    140 /** ModR/M: r/m (memory), CL. */
    141 #define IEMOPFORM_M_CL_MEM      (IEMOPFORM_M_CL | IEMOPFORM_NOT_MOD3)
    142 /** ModR/M: reg only */
    143 #define IEMOPFORM_R             9
    144 
    145 /** VEX+ModR/M: reg, r/m */
    146 #define IEMOPFORM_VEX_RM        16
    147 /** VEX+ModR/M: reg, r/m (register) */
    148 #define IEMOPFORM_VEX_RM_REG    (IEMOPFORM_VEX_RM | IEMOPFORM_MOD3)
    149 /** VEX+ModR/M: reg, r/m (memory)   */
    150 #define IEMOPFORM_VEX_RM_MEM    (IEMOPFORM_VEX_RM | IEMOPFORM_NOT_MOD3)
    151 /** VEX+ModR/M: r/m, reg */
    152 #define IEMOPFORM_VEX_MR        17
    153 /** VEX+ModR/M: r/m (register), reg */
    154 #define IEMOPFORM_VEX_MR_REG    (IEMOPFORM_VEX_MR | IEMOPFORM_MOD3)
    155 /** VEX+ModR/M: r/m (memory), reg */
    156 #define IEMOPFORM_VEX_MR_MEM    (IEMOPFORM_VEX_MR | IEMOPFORM_NOT_MOD3)
    157 /** VEX+ModR/M: r/m, reg, imm8 */
    158 #define IEMOPFORM_VEX_MRI       18
    159 /** VEX+ModR/M: r/m (register), reg, imm8 */
    160 #define IEMOPFORM_VEX_MRI_REG   (IEMOPFORM_VEX_MRI | IEMOPFORM_MOD3)
    161 /** VEX+ModR/M: r/m (memory), reg, imm8 */
    162 #define IEMOPFORM_VEX_MRI_MEM   (IEMOPFORM_VEX_MRI | IEMOPFORM_NOT_MOD3)
    163 /** VEX+ModR/M: r/m only */
    164 #define IEMOPFORM_VEX_M         19
    165 /** VEX+ModR/M: r/m only (register). */
    166 #define IEMOPFORM_VEX_M_REG     (IEMOPFORM_VEX_M | IEMOPFORM_MOD3)
    167 /** VEX+ModR/M: r/m only (memory). */
    168 #define IEMOPFORM_VEX_M_MEM     (IEMOPFORM_VEX_M | IEMOPFORM_NOT_MOD3)
    169 /** VEX+ModR/M: reg only */
    170 #define IEMOPFORM_VEX_R         20
    171 /** VEX+ModR/M: reg, vvvv, r/m */
    172 #define IEMOPFORM_VEX_RVM       21
    173 /** VEX+ModR/M: reg, vvvv, r/m (register). */
    174 #define IEMOPFORM_VEX_RVM_REG   (IEMOPFORM_VEX_RVM | IEMOPFORM_MOD3)
    175 /** VEX+ModR/M: reg, vvvv, r/m (memory). */
    176 #define IEMOPFORM_VEX_RVM_MEM   (IEMOPFORM_VEX_RVM | IEMOPFORM_NOT_MOD3)
    177 /** VEX+ModR/M: reg, vvvv, r/m, imm */
    178 #define IEMOPFORM_VEX_RVMI      22
    179 /** VEX+ModR/M: reg, vvvv, r/m (register), imm. */
    180 #define IEMOPFORM_VEX_RVMI_REG  (IEMOPFORM_VEX_RVMI | IEMOPFORM_MOD3)
    181 /** VEX+ModR/M: reg, vvvv, r/m (memory), imm. */
    182 #define IEMOPFORM_VEX_RVMI_MEM  (IEMOPFORM_VEX_RVMI | IEMOPFORM_NOT_MOD3)
    183 /** VEX+ModR/M: reg, vvvv, r/m, imm(reg) */
    184 #define IEMOPFORM_VEX_RVMR      23
    185 /** VEX+ModR/M: reg, vvvv, r/m (register), imm(reg). */
    186 #define IEMOPFORM_VEX_RVMR_REG  (IEMOPFORM_VEX_RVMI | IEMOPFORM_MOD3)
    187 /** VEX+ModR/M: reg, vvvv, r/m (memory), imm(reg). */
    188 #define IEMOPFORM_VEX_RVMR_MEM  (IEMOPFORM_VEX_RVMI | IEMOPFORM_NOT_MOD3)
    189 /** VEX+ModR/M: reg, r/m, vvvv */
    190 #define IEMOPFORM_VEX_RMV       24
    191 /** VEX+ModR/M: reg, r/m, vvvv (register). */
    192 #define IEMOPFORM_VEX_RMV_REG   (IEMOPFORM_VEX_RMV | IEMOPFORM_MOD3)
    193 /** VEX+ModR/M: reg, r/m, vvvv (memory). */
    194 #define IEMOPFORM_VEX_RMV_MEM   (IEMOPFORM_VEX_RMV | IEMOPFORM_NOT_MOD3)
    195 /** VEX+ModR/M: reg, r/m, imm8 */
    196 #define IEMOPFORM_VEX_RMI       25
    197 /** VEX+ModR/M: reg, r/m, imm8 (register). */
    198 #define IEMOPFORM_VEX_RMI_REG   (IEMOPFORM_VEX_RMI | IEMOPFORM_MOD3)
    199 /** VEX+ModR/M: reg, r/m, imm8 (memory). */
    200 #define IEMOPFORM_VEX_RMI_MEM   (IEMOPFORM_VEX_RMI | IEMOPFORM_NOT_MOD3)
    201 /** VEX+ModR/M: r/m, vvvv, reg */
    202 #define IEMOPFORM_VEX_MVR       26
    203 /** VEX+ModR/M: r/m, vvvv, reg (register) */
    204 #define IEMOPFORM_VEX_MVR_REG   (IEMOPFORM_VEX_MVR | IEMOPFORM_MOD3)
    205 /** VEX+ModR/M: r/m, vvvv, reg (memory) */
    206 #define IEMOPFORM_VEX_MVR_MEM   (IEMOPFORM_VEX_MVR | IEMOPFORM_NOT_MOD3)
    207 /** VEX+ModR/M+/n: vvvv, r/m */
    208 #define IEMOPFORM_VEX_VM        27
    209 /** VEX+ModR/M+/n: vvvv, r/m (register) */
    210 #define IEMOPFORM_VEX_VM_REG    (IEMOPFORM_VEX_VM | IEMOPFORM_MOD3)
    211 /** VEX+ModR/M+/n: vvvv, r/m (memory) */
    212 #define IEMOPFORM_VEX_VM_MEM    (IEMOPFORM_VEX_VM | IEMOPFORM_NOT_MOD3)
    213 /** VEX+ModR/M+/n: vvvv, r/m, imm8 */
    214 #define IEMOPFORM_VEX_VMI       28
    215 /** VEX+ModR/M+/n: vvvv, r/m, imm8 (register) */
    216 #define IEMOPFORM_VEX_VMI_REG   (IEMOPFORM_VEX_VMI | IEMOPFORM_MOD3)
    217 /** VEX+ModR/M+/n: vvvv, r/m, imm8 (memory) */
    218 #define IEMOPFORM_VEX_VMI_MEM   (IEMOPFORM_VEX_VMI | IEMOPFORM_NOT_MOD3)
    219 
    220 /** Fixed register instruction, no R/M. */
    221 #define IEMOPFORM_FIXED         32
    222 
    223 /** The r/m is a register. */
    224 #define IEMOPFORM_MOD3          RT_BIT_32(8)
    225 /** The r/m is a memory access. */
    226 #define IEMOPFORM_NOT_MOD3      RT_BIT_32(9)
    227 /** @} */
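
/* Usage sketch: a decoder can fold the ModR/M mode into a base form to get the
 * _REG/_MEM variant; the helper below is hypothetical, the values are the ones
 * defined above:
 *
 *      static uint32_t iemOpFormWithMod(uint32_t fForm, uint8_t bRm)
 *      {
 *          return fForm | ((bRm & 0xc0) == 0xc0 ? IEMOPFORM_MOD3 : IEMOPFORM_NOT_MOD3);
 *      }
 *
 * so iemOpFormWithMod(IEMOPFORM_VEX_RVM, 0xc0) yields IEMOPFORM_VEX_RVM_REG.
 */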
    228 
    229 /** @name IEMOPHINT_XXX - Additional Opcode Hints
    230  * @note These are ORed together with IEMOPFORM_XXX.
    231  * @note x86 specific
    232  * @{ */
    233 /** Ignores the operand size prefix (66h). */
    234 #define IEMOPHINT_IGNORES_OZ_PFX    RT_BIT_32(10)
    235 /** Ignores REX.W (aka WIG). */
    236 #define IEMOPHINT_IGNORES_REXW      RT_BIT_32(11)
    237 /** Both the operand size prefixes (66h + REX.W) are ignored. */
    238 #define IEMOPHINT_IGNORES_OP_SIZES  (IEMOPHINT_IGNORES_OZ_PFX | IEMOPHINT_IGNORES_REXW)
    239 /** Allowed with the lock prefix. */
    240 #define IEMOPHINT_LOCK_ALLOWED      RT_BIT_32(11) /** @todo overlaps IEMOPHINT_IGNORES_REXW. */
    241 /** The VEX.L value is ignored (aka LIG). */
    242 #define IEMOPHINT_VEX_L_IGNORED     RT_BIT_32(12)
    243 /** The VEX.L value must be zero (i.e. 128-bit width only). */
    244 #define IEMOPHINT_VEX_L_ZERO        RT_BIT_32(13)
    245 /** The VEX.L value must be one (i.e. 256-bit width only). */
    246 #define IEMOPHINT_VEX_L_ONE         RT_BIT_32(14)
    247 /** The VEX.V value must be zero. */
    248 #define IEMOPHINT_VEX_V_ZERO        RT_BIT_32(15)
    250 /** The REX.W/VEX.W value must be zero. */
    250 #define IEMOPHINT_REX_W_ZERO        RT_BIT_32(16)
    251 #define IEMOPHINT_VEX_W_ZERO        IEMOPHINT_REX_W_ZERO
    253 /** The REX.W/VEX.W value must be one. */
    253 #define IEMOPHINT_REX_W_ONE         RT_BIT_32(17)
    254 #define IEMOPHINT_VEX_W_ONE         IEMOPHINT_REX_W_ONE
    255 
    256 /** Hint to IEMAllInstructionPython.py that this macro should be skipped.  */
    257 #define IEMOPHINT_SKIP_PYTHON       RT_BIT_32(31)
    258 /** @} */
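
/* Usage sketch: the hint bits are ORed together with an IEMOPFORM_XXX value
 * into a single uint32_t, e.g. a register-form VEX.RVM instruction that is
 * 128-bit only could be described as:
 *
 *      IEMOPFORM_VEX_RVM_REG | IEMOPHINT_VEX_L_ZERO
 */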
    259 
    260 /**
    261  * Possible hardware task switch sources - iemTaskSwitch(), iemVmxVmexitTaskSwitch().
    262  * @note x86 specific
    263  */
    264 typedef enum IEMTASKSWITCH
    265 {
    266     /** Task switch caused by an interrupt/exception. */
    267     IEMTASKSWITCH_INT_XCPT = 1,
    268     /** Task switch caused by a far CALL. */
    269     IEMTASKSWITCH_CALL,
    270     /** Task switch caused by a far JMP. */
    271     IEMTASKSWITCH_JUMP,
    272     /** Task switch caused by an IRET. */
    273     IEMTASKSWITCH_IRET
    274 } IEMTASKSWITCH;
    275 AssertCompileSize(IEMTASKSWITCH, 4);
    276 
    277 /**
    278  * Possible CrX load (write) sources - iemCImpl_load_CrX().
    279  * @note x86 specific
    280  */
    281 typedef enum IEMACCESSCRX
    282 {
    283     /** CrX access caused by 'mov crX' instruction. */
    284     IEMACCESSCRX_MOV_CRX,
    285     /** CrX (CR0) write caused by 'lmsw' instruction. */
    286     IEMACCESSCRX_LMSW,
    287     /** CrX (CR0) write caused by 'clts' instruction. */
    288     IEMACCESSCRX_CLTS,
    289     /** CrX (CR0) read caused by 'smsw' instruction. */
    290     IEMACCESSCRX_SMSW
    291 } IEMACCESSCRX;
    292 
    293 #ifdef VBOX_WITH_NESTED_HWVIRT_VMX
    294 /** @name IEM_SLAT_FAIL_XXX - Second-level address translation failure information.
    295  *
    296  * These flags provide further context to SLAT page-walk failures that could not be
    297  * determined by PGM (e.g., PGM is not privy to memory access permissions).
    298  *
    299  * @{
    300  */
    301 /** Translating a nested-guest linear address failed accessing a nested-guest
    302  *  physical address. */
    303 # define IEM_SLAT_FAIL_LINEAR_TO_PHYS_ADDR          RT_BIT_32(0)
    304 /** Translating a nested-guest linear address failed accessing a
    305  *  paging-structure entry or updating accessed/dirty bits. */
    306 # define IEM_SLAT_FAIL_LINEAR_TO_PAGE_TABLE         RT_BIT_32(1)
    307 /** @} */
    308 
    309 DECLCALLBACK(FNPGMPHYSHANDLER)      iemVmxApicAccessPageHandler;
    310 # ifndef IN_RING3
    311 DECLCALLBACK(FNPGMRZPHYSPFHANDLER)  iemVmxApicAccessPagePfHandler;
    312 # endif
    313 #endif
    314 
    315 /**
    316  * Indicates to the verifier that the given flag set is undefined.
    317  *
    318  * Can be invoked again to add more flags.
    319  *
    320  * This is a NOOP if the verifier isn't compiled in.
    321  *
    322  * @note We're temporarily keeping this until code is converted to new
    323  *       disassembler style opcode handling.
    324  */
    325 #define IEMOP_VERIFICATION_UNDEFINED_EFLAGS(a_fEfl) do { } while (0)
    326 
    327 
    328 /** Defined in IEMAllAImplC.cpp but also used by IEMAllAImplA.asm. */
    329 RT_C_DECLS_BEGIN
    330 extern uint8_t const g_afParity[256];
    331 RT_C_DECLS_END
    332 
    333 
    334 /** @name Arithmetic assignment operations on bytes (binary).
    335  * @{ */
    336 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU8, (uint32_t fEFlagsIn, uint8_t  *pu8Dst,  uint8_t  u8Src));
    337 typedef FNIEMAIMPLBINU8  *PFNIEMAIMPLBINU8;
    338 FNIEMAIMPLBINU8 iemAImpl_add_u8, iemAImpl_add_u8_locked;
    339 FNIEMAIMPLBINU8 iemAImpl_adc_u8, iemAImpl_adc_u8_locked;
    340 FNIEMAIMPLBINU8 iemAImpl_sub_u8, iemAImpl_sub_u8_locked;
    341 FNIEMAIMPLBINU8 iemAImpl_sbb_u8, iemAImpl_sbb_u8_locked;
    342 FNIEMAIMPLBINU8  iemAImpl_or_u8,  iemAImpl_or_u8_locked;
    343 FNIEMAIMPLBINU8 iemAImpl_xor_u8, iemAImpl_xor_u8_locked;
    344 FNIEMAIMPLBINU8 iemAImpl_and_u8, iemAImpl_and_u8_locked;
    345 /** @} */
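
/* Calling-convention sketch (values illustrative; X86_EFL_1 is the always-set
 * reserved flag bit): the input EFLAGS are passed by value, the destination is
 * updated in place, and the resulting EFLAGS are returned:
 *
 *      uint8_t  u8Dst   = 0x7f;
 *      uint32_t fEFlags = iemAImpl_add_u8(X86_EFL_1, &u8Dst, 1);
 *      // u8Dst is now 0x80; OF and SF are set in fEFlags.
 */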
    346 
    347 /** @name Arithmetic assignment operations on words (binary).
    348  * @{ */
    349 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU16, (uint32_t fEFlagsIn, uint16_t *pu16Dst, uint16_t u16Src));
    350 typedef FNIEMAIMPLBINU16  *PFNIEMAIMPLBINU16;
    351 FNIEMAIMPLBINU16 iemAImpl_add_u16, iemAImpl_add_u16_locked;
    352 FNIEMAIMPLBINU16 iemAImpl_adc_u16, iemAImpl_adc_u16_locked;
    353 FNIEMAIMPLBINU16 iemAImpl_sub_u16, iemAImpl_sub_u16_locked;
    354 FNIEMAIMPLBINU16 iemAImpl_sbb_u16, iemAImpl_sbb_u16_locked;
    355 FNIEMAIMPLBINU16  iemAImpl_or_u16,  iemAImpl_or_u16_locked;
    356 FNIEMAIMPLBINU16 iemAImpl_xor_u16, iemAImpl_xor_u16_locked;
    357 FNIEMAIMPLBINU16 iemAImpl_and_u16, iemAImpl_and_u16_locked;
    358 /** @}  */
    359 
    360 
    361 /** @name Arithmetic assignment operations on double words (binary).
    362  * @{ */
    363 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU32, (uint32_t fEFlagsIn, uint32_t *pu32Dst, uint32_t u32Src));
    364 typedef FNIEMAIMPLBINU32 *PFNIEMAIMPLBINU32;
    365 FNIEMAIMPLBINU32 iemAImpl_add_u32, iemAImpl_add_u32_locked;
    366 FNIEMAIMPLBINU32 iemAImpl_adc_u32, iemAImpl_adc_u32_locked;
    367 FNIEMAIMPLBINU32 iemAImpl_sub_u32, iemAImpl_sub_u32_locked;
    368 FNIEMAIMPLBINU32 iemAImpl_sbb_u32, iemAImpl_sbb_u32_locked;
    369 FNIEMAIMPLBINU32  iemAImpl_or_u32,  iemAImpl_or_u32_locked;
    370 FNIEMAIMPLBINU32 iemAImpl_xor_u32, iemAImpl_xor_u32_locked;
    371 FNIEMAIMPLBINU32 iemAImpl_and_u32, iemAImpl_and_u32_locked;
    372 FNIEMAIMPLBINU32 iemAImpl_blsi_u32, iemAImpl_blsi_u32_fallback;
    373 FNIEMAIMPLBINU32 iemAImpl_blsr_u32, iemAImpl_blsr_u32_fallback;
    374 FNIEMAIMPLBINU32 iemAImpl_blsmsk_u32, iemAImpl_blsmsk_u32_fallback;
    375 /** @}  */
    376 
    377 /** @name Arithmetic assignment operations on quad words (binary).
    378  * @{ */
    379 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINU64, (uint32_t fEFlagsIn, uint64_t *pu64Dst, uint64_t u64Src));
    380 typedef FNIEMAIMPLBINU64 *PFNIEMAIMPLBINU64;
    381 FNIEMAIMPLBINU64 iemAImpl_add_u64, iemAImpl_add_u64_locked;
    382 FNIEMAIMPLBINU64 iemAImpl_adc_u64, iemAImpl_adc_u64_locked;
    383 FNIEMAIMPLBINU64 iemAImpl_sub_u64, iemAImpl_sub_u64_locked;
    384 FNIEMAIMPLBINU64 iemAImpl_sbb_u64, iemAImpl_sbb_u64_locked;
    385 FNIEMAIMPLBINU64  iemAImpl_or_u64,  iemAImpl_or_u64_locked;
    386 FNIEMAIMPLBINU64 iemAImpl_xor_u64, iemAImpl_xor_u64_locked;
    387 FNIEMAIMPLBINU64 iemAImpl_and_u64, iemAImpl_and_u64_locked;
    388 FNIEMAIMPLBINU64 iemAImpl_blsi_u64, iemAImpl_blsi_u64_fallback;
    389 FNIEMAIMPLBINU64 iemAImpl_blsr_u64, iemAImpl_blsr_u64_fallback;
    390 FNIEMAIMPLBINU64 iemAImpl_blsmsk_u64, iemAImpl_blsmsk_u64_fallback;
    391 /** @}  */
    392 
    393 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU8, (uint32_t fEFlagsIn, uint8_t const *pu8Dst, uint8_t u8Src));
    394 typedef FNIEMAIMPLBINROU8 *PFNIEMAIMPLBINROU8;
    395 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU16,(uint32_t fEFlagsIn, uint16_t const *pu16Dst, uint16_t u16Src));
    396 typedef FNIEMAIMPLBINROU16 *PFNIEMAIMPLBINROU16;
    397 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU32,(uint32_t fEFlagsIn, uint32_t const *pu32Dst, uint32_t u32Src));
    398 typedef FNIEMAIMPLBINROU32 *PFNIEMAIMPLBINROU32;
    399 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLBINROU64,(uint32_t fEFlagsIn, uint64_t const *pu64Dst, uint64_t u64Src));
    400 typedef FNIEMAIMPLBINROU64 *PFNIEMAIMPLBINROU64;
    401 
    402 /** @name Compare operations (thrown in with the binary ops).
    403  * @{ */
    404 FNIEMAIMPLBINROU8  iemAImpl_cmp_u8;
    405 FNIEMAIMPLBINROU16 iemAImpl_cmp_u16;
    406 FNIEMAIMPLBINROU32 iemAImpl_cmp_u32;
    407 FNIEMAIMPLBINROU64 iemAImpl_cmp_u64;
    408 /** @}  */
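
/* Sketch: the read-only (FNIEMAIMPLBINROUxx) variants never write the
 * destination, which is all CMP, TEST and BT need:
 *
 *      uint8_t const u8Val   = 0x42;
 *      uint32_t      fEFlags = iemAImpl_cmp_u8(X86_EFL_1, &u8Val, 0x42);
 *      // ZF is set in fEFlags; u8Val is untouched.
 */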
    409 
    410 /** @name Test operations (thrown in with the binary ops).
    411  * @{ */
    412 FNIEMAIMPLBINROU8  iemAImpl_test_u8;
    413 FNIEMAIMPLBINROU16 iemAImpl_test_u16;
    414 FNIEMAIMPLBINROU32 iemAImpl_test_u32;
    415 FNIEMAIMPLBINROU64 iemAImpl_test_u64;
    416 /** @}  */
    417 
    418 /** @name Bit operations (thrown in with the binary ops).
    419  * @{ */
    420 FNIEMAIMPLBINROU16 iemAImpl_bt_u16;
    421 FNIEMAIMPLBINROU32 iemAImpl_bt_u32;
    422 FNIEMAIMPLBINROU64 iemAImpl_bt_u64;
    423 FNIEMAIMPLBINU16 iemAImpl_btc_u16, iemAImpl_btc_u16_locked;
    424 FNIEMAIMPLBINU32 iemAImpl_btc_u32, iemAImpl_btc_u32_locked;
    425 FNIEMAIMPLBINU64 iemAImpl_btc_u64, iemAImpl_btc_u64_locked;
    426 FNIEMAIMPLBINU16 iemAImpl_btr_u16, iemAImpl_btr_u16_locked;
    427 FNIEMAIMPLBINU32 iemAImpl_btr_u32, iemAImpl_btr_u32_locked;
    428 FNIEMAIMPLBINU64 iemAImpl_btr_u64, iemAImpl_btr_u64_locked;
    429 FNIEMAIMPLBINU16 iemAImpl_bts_u16, iemAImpl_bts_u16_locked;
    430 FNIEMAIMPLBINU32 iemAImpl_bts_u32, iemAImpl_bts_u32_locked;
    431 FNIEMAIMPLBINU64 iemAImpl_bts_u64, iemAImpl_bts_u64_locked;
    432 /** @}  */
    433 
    434 /** @name Arithmetic three operand operations on double words (binary).
    435  * @{ */
    436 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU32, (uint32_t *pu32Dst, uint32_t u32Src1, uint32_t u32Src2, uint32_t *pEFlags));
    437 typedef FNIEMAIMPLBINVEXU32 *PFNIEMAIMPLBINVEXU32;
    438 FNIEMAIMPLBINVEXU32 iemAImpl_andn_u32, iemAImpl_andn_u32_fallback;
    439 FNIEMAIMPLBINVEXU32 iemAImpl_bextr_u32, iemAImpl_bextr_u32_fallback;
    440 FNIEMAIMPLBINVEXU32 iemAImpl_bzhi_u32, iemAImpl_bzhi_u32_fallback;
    441 /** @}  */
    442 
    443 /** @name Arithmetic three operand operations on quad words (binary).
    444  * @{ */
    445 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU64, (uint64_t *pu64Dst, uint64_t u64Src1, uint64_t u64Src2, uint32_t *pEFlags));
    446 typedef FNIEMAIMPLBINVEXU64 *PFNIEMAIMPLBINVEXU64;
    447 FNIEMAIMPLBINVEXU64 iemAImpl_andn_u64, iemAImpl_andn_u64_fallback;
    448 FNIEMAIMPLBINVEXU64 iemAImpl_bextr_u64, iemAImpl_bextr_u64_fallback;
    449 FNIEMAIMPLBINVEXU64 iemAImpl_bzhi_u64, iemAImpl_bzhi_u64_fallback;
    450 /** @}  */
    451 
    452 /** @name Arithmetic three operand operations on double words w/o EFLAGS (binary).
    453  * @{ */
    454 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU32NOEFL, (uint32_t *pu32Dst, uint32_t u32Src1, uint32_t u32Src2));
    455 typedef FNIEMAIMPLBINVEXU32NOEFL *PFNIEMAIMPLBINVEXU32NOEFL;
    456 FNIEMAIMPLBINVEXU32NOEFL iemAImpl_pdep_u32, iemAImpl_pdep_u32_fallback;
    457 FNIEMAIMPLBINVEXU32NOEFL iemAImpl_pext_u32, iemAImpl_pext_u32_fallback;
    458 FNIEMAIMPLBINVEXU32NOEFL iemAImpl_sarx_u32, iemAImpl_sarx_u32_fallback;
    459 FNIEMAIMPLBINVEXU32NOEFL iemAImpl_shlx_u32, iemAImpl_shlx_u32_fallback;
    460 FNIEMAIMPLBINVEXU32NOEFL iemAImpl_shrx_u32, iemAImpl_shrx_u32_fallback;
    461 FNIEMAIMPLBINVEXU32NOEFL iemAImpl_rorx_u32;
    462 /** @}  */
    463 
    464 /** @name Arithmetic three operand operations on quad words w/o EFLAGS (binary).
    465  * @{ */
    466 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU64NOEFL, (uint64_t *pu64Dst, uint64_t u64Src1, uint64_t u64Src2));
    467 typedef FNIEMAIMPLBINVEXU64NOEFL *PFNIEMAIMPLBINVEXU64NOEFL;
    468 FNIEMAIMPLBINVEXU64NOEFL iemAImpl_pdep_u64, iemAImpl_pdep_u64_fallback;
    469 FNIEMAIMPLBINVEXU64NOEFL iemAImpl_pext_u64, iemAImpl_pext_u64_fallback;
    470 FNIEMAIMPLBINVEXU64NOEFL iemAImpl_sarx_u64, iemAImpl_sarx_u64_fallback;
    471 FNIEMAIMPLBINVEXU64NOEFL iemAImpl_shlx_u64, iemAImpl_shlx_u64_fallback;
    472 FNIEMAIMPLBINVEXU64NOEFL iemAImpl_shrx_u64, iemAImpl_shrx_u64_fallback;
    473 FNIEMAIMPLBINVEXU64NOEFL iemAImpl_rorx_u64;
    474 /** @}  */
    475 
    476 /** @name MULX 32-bit and 64-bit.
    477  * @{ */
    478 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMULXVEXU32, (uint32_t *puDst1, uint32_t *puDst2, uint32_t uSrc1, uint32_t uSrc2));
    479 typedef FNIEMAIMPLMULXVEXU32 *PFNIEMAIMPLMULXVEXU32;
    480 FNIEMAIMPLMULXVEXU32 iemAImpl_mulx_u32, iemAImpl_mulx_u32_fallback;
    481 
    482 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMULXVEXU64, (uint64_t *puDst1, uint64_t *puDst2, uint64_t uSrc1, uint64_t uSrc2));
    483 typedef FNIEMAIMPLMULXVEXU64 *PFNIEMAIMPLMULXVEXU64;
    484 FNIEMAIMPLMULXVEXU64 iemAImpl_mulx_u64, iemAImpl_mulx_u64_fallback;
    485 /** @}  */
    486 
    487 
    488 /** @name Exchange memory with register operations.
    489  * @{ */
    490 IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u8_locked, (uint8_t  *pu8Mem,  uint8_t  *pu8Reg));
    491 IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u16_locked,(uint16_t *pu16Mem, uint16_t *pu16Reg));
    492 IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u32_locked,(uint32_t *pu32Mem, uint32_t *pu32Reg));
    493 IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u64_locked,(uint64_t *pu64Mem, uint64_t *pu64Reg));
    494 IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u8_unlocked, (uint8_t  *pu8Mem,  uint8_t  *pu8Reg));
    495 IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u16_unlocked,(uint16_t *pu16Mem, uint16_t *pu16Reg));
    496 IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u32_unlocked,(uint32_t *pu32Mem, uint32_t *pu32Reg));
    497 IEM_DECL_IMPL_DEF(void, iemAImpl_xchg_u64_unlocked,(uint64_t *pu64Mem, uint64_t *pu64Reg));
    498 /** @}  */
    499 
    500 /** @name Exchange and add operations.
    501  * @{ */
    502 IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u8, (uint8_t  *pu8Dst,  uint8_t  *pu8Reg,  uint32_t *pEFlags));
    503 IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u16,(uint16_t *pu16Dst, uint16_t *pu16Reg, uint32_t *pEFlags));
    504 IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u32,(uint32_t *pu32Dst, uint32_t *pu32Reg, uint32_t *pEFlags));
    505 IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u64,(uint64_t *pu64Dst, uint64_t *pu64Reg, uint32_t *pEFlags));
    506 IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u8_locked, (uint8_t  *pu8Dst,  uint8_t  *pu8Reg,  uint32_t *pEFlags));
    507 IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u16_locked,(uint16_t *pu16Dst, uint16_t *pu16Reg, uint32_t *pEFlags));
    508 IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u32_locked,(uint32_t *pu32Dst, uint32_t *pu32Reg, uint32_t *pEFlags));
    509 IEM_DECL_IMPL_DEF(void, iemAImpl_xadd_u64_locked,(uint64_t *pu64Dst, uint64_t *pu64Reg, uint32_t *pEFlags));
    510 /** @}  */
    511 
    512 /** @name Compare and exchange.
    513  * @{ */
    514 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u8,        (uint8_t  *pu8Dst,  uint8_t  *puAl,  uint8_t  uSrcReg, uint32_t *pEFlags));
    515 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u8_locked, (uint8_t  *pu8Dst,  uint8_t  *puAl,  uint8_t  uSrcReg, uint32_t *pEFlags));
    516 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u16,       (uint16_t *pu16Dst, uint16_t *puAx,  uint16_t uSrcReg, uint32_t *pEFlags));
    517 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u16_locked,(uint16_t *pu16Dst, uint16_t *puAx,  uint16_t uSrcReg, uint32_t *pEFlags));
    518 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u32,       (uint32_t *pu32Dst, uint32_t *puEax, uint32_t uSrcReg, uint32_t *pEFlags));
    519 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u32_locked,(uint32_t *pu32Dst, uint32_t *puEax, uint32_t uSrcReg, uint32_t *pEFlags));
    520 #if ARCH_BITS == 32
    521 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64,       (uint64_t *pu64Dst, uint64_t *puRax, uint64_t *puSrcReg, uint32_t *pEFlags));
    522 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64_locked,(uint64_t *pu64Dst, uint64_t *puRax, uint64_t *puSrcReg, uint32_t *pEFlags));
    523 #else
    524 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64,       (uint64_t *pu64Dst, uint64_t *puRax, uint64_t uSrcReg, uint32_t *pEFlags));
    525 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg_u64_locked,(uint64_t *pu64Dst, uint64_t *puRax, uint64_t uSrcReg, uint32_t *pEFlags));
    526 #endif
    527 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg8b,(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx,
    528                                             uint32_t *pEFlags));
    529 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg8b_locked,(uint64_t *pu64Dst, PRTUINT64U pu64EaxEdx, PRTUINT64U pu64EbxEcx,
    530                                                    uint32_t *pEFlags));
    531 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx, PRTUINT128U pu128RbxRcx,
    532                                              uint32_t *pEFlags));
    533 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b_locked,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx, PRTUINT128U pu128RbxRcx,
    534                                                     uint32_t *pEFlags));
    535 #ifndef RT_ARCH_ARM64
    536 IEM_DECL_IMPL_DEF(void, iemAImpl_cmpxchg16b_fallback,(PRTUINT128U pu128Dst, PRTUINT128U pu128RaxRdx,
    537                                                       PRTUINT128U pu128RbxRcx, uint32_t *pEFlags));
    538 #endif
    539 /** @} */
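
/* Sketch of the CMPXCHG convention (64-bit host variant): the accumulator is
 * passed by reference and receives the old destination value on mismatch,
 * while ZF in the updated EFLAGS signals success:
 *
 *      uint64_t u64Dst  = 1, u64Rax = 1;
 *      uint32_t fEFlags = X86_EFL_1;
 *      iemAImpl_cmpxchg_u64(&u64Dst, &u64Rax, 2, &fEFlags);
 *      // u64Rax matched u64Dst, so u64Dst == 2 and ZF is set in fEFlags.
 */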
    540 
    541 /** @name Memory ordering
    542  * @{ */
    543 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEMFENCE,(void));
    544 typedef FNIEMAIMPLMEMFENCE *PFNIEMAIMPLMEMFENCE;
    545 IEM_DECL_IMPL_DEF(void, iemAImpl_mfence,(void));
    546 IEM_DECL_IMPL_DEF(void, iemAImpl_sfence,(void));
    547 IEM_DECL_IMPL_DEF(void, iemAImpl_lfence,(void));
    548 #ifndef RT_ARCH_ARM64
    549 IEM_DECL_IMPL_DEF(void, iemAImpl_alt_mem_fence,(void));
    550 #endif
    551 /** @} */
    552 
    553 /** @name Double precision shifts
    554  * @{ */
    555 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU16,(uint16_t *pu16Dst, uint16_t u16Src, uint8_t cShift, uint32_t *pEFlags));
    556 typedef FNIEMAIMPLSHIFTDBLU16  *PFNIEMAIMPLSHIFTDBLU16;
    557 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU32,(uint32_t *pu32Dst, uint32_t u32Src, uint8_t cShift, uint32_t *pEFlags));
    558 typedef FNIEMAIMPLSHIFTDBLU32  *PFNIEMAIMPLSHIFTDBLU32;
    559 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLSHIFTDBLU64,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t cShift, uint32_t *pEFlags));
    560 typedef FNIEMAIMPLSHIFTDBLU64  *PFNIEMAIMPLSHIFTDBLU64;
    561 FNIEMAIMPLSHIFTDBLU16 iemAImpl_shld_u16, iemAImpl_shld_u16_amd, iemAImpl_shld_u16_intel;
    562 FNIEMAIMPLSHIFTDBLU32 iemAImpl_shld_u32, iemAImpl_shld_u32_amd, iemAImpl_shld_u32_intel;
    563 FNIEMAIMPLSHIFTDBLU64 iemAImpl_shld_u64, iemAImpl_shld_u64_amd, iemAImpl_shld_u64_intel;
    564 FNIEMAIMPLSHIFTDBLU16 iemAImpl_shrd_u16, iemAImpl_shrd_u16_amd, iemAImpl_shrd_u16_intel;
    565 FNIEMAIMPLSHIFTDBLU32 iemAImpl_shrd_u32, iemAImpl_shrd_u32_amd, iemAImpl_shrd_u32_intel;
    566 FNIEMAIMPLSHIFTDBLU64 iemAImpl_shrd_u64, iemAImpl_shrd_u64_amd, iemAImpl_shrd_u64_intel;
    567 /** @}  */
    568 
    569 
    570 /** @name Bit search operations (thrown in with the binary ops).
    571  * @{ */
    572 FNIEMAIMPLBINU16 iemAImpl_bsf_u16, iemAImpl_bsf_u16_amd, iemAImpl_bsf_u16_intel;
    573 FNIEMAIMPLBINU32 iemAImpl_bsf_u32, iemAImpl_bsf_u32_amd, iemAImpl_bsf_u32_intel;
    574 FNIEMAIMPLBINU64 iemAImpl_bsf_u64, iemAImpl_bsf_u64_amd, iemAImpl_bsf_u64_intel;
    575 FNIEMAIMPLBINU16 iemAImpl_bsr_u16, iemAImpl_bsr_u16_amd, iemAImpl_bsr_u16_intel;
    576 FNIEMAIMPLBINU32 iemAImpl_bsr_u32, iemAImpl_bsr_u32_amd, iemAImpl_bsr_u32_intel;
    577 FNIEMAIMPLBINU64 iemAImpl_bsr_u64, iemAImpl_bsr_u64_amd, iemAImpl_bsr_u64_intel;
    578 FNIEMAIMPLBINU16 iemAImpl_lzcnt_u16, iemAImpl_lzcnt_u16_amd, iemAImpl_lzcnt_u16_intel;
    579 FNIEMAIMPLBINU32 iemAImpl_lzcnt_u32, iemAImpl_lzcnt_u32_amd, iemAImpl_lzcnt_u32_intel;
    580 FNIEMAIMPLBINU64 iemAImpl_lzcnt_u64, iemAImpl_lzcnt_u64_amd, iemAImpl_lzcnt_u64_intel;
    581 FNIEMAIMPLBINU16 iemAImpl_tzcnt_u16, iemAImpl_tzcnt_u16_amd, iemAImpl_tzcnt_u16_intel;
    582 FNIEMAIMPLBINU32 iemAImpl_tzcnt_u32, iemAImpl_tzcnt_u32_amd, iemAImpl_tzcnt_u32_intel;
    583 FNIEMAIMPLBINU64 iemAImpl_tzcnt_u64, iemAImpl_tzcnt_u64_amd, iemAImpl_tzcnt_u64_intel;
    584 FNIEMAIMPLBINU16 iemAImpl_popcnt_u16, iemAImpl_popcnt_u16_fallback;
    585 FNIEMAIMPLBINU32 iemAImpl_popcnt_u32, iemAImpl_popcnt_u32_fallback;
    586 FNIEMAIMPLBINU64 iemAImpl_popcnt_u64, iemAImpl_popcnt_u64_fallback;
    587 /** @}  */
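
/* Sketch: the bit-search helpers reuse the binary signature, scanning the
 * source operand and writing the count to the destination:
 *
 *      uint32_t u32Dst  = 0;
 *      uint32_t fEFlags = iemAImpl_lzcnt_u32(X86_EFL_1, &u32Dst, UINT32_C(0x00008000));
 *      // u32Dst == 16; for a zero source u32Dst would be 32 and CF set.
 */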
    588 
    589 /** @name Signed multiplication operations (thrown in with the binary ops).
    590  * @{ */
    591 FNIEMAIMPLBINU16 iemAImpl_imul_two_u16, iemAImpl_imul_two_u16_amd, iemAImpl_imul_two_u16_intel;
    592 FNIEMAIMPLBINU32 iemAImpl_imul_two_u32, iemAImpl_imul_two_u32_amd, iemAImpl_imul_two_u32_intel;
    593 FNIEMAIMPLBINU64 iemAImpl_imul_two_u64, iemAImpl_imul_two_u64_amd, iemAImpl_imul_two_u64_intel;
    594 /** @}  */
    595 
    596 /** @name Arithmetic assignment operations on bytes (unary).
    597  * @{ */
    598 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU8,  (uint8_t  *pu8Dst,  uint32_t *pEFlags));
    599 typedef FNIEMAIMPLUNARYU8  *PFNIEMAIMPLUNARYU8;
    600 FNIEMAIMPLUNARYU8 iemAImpl_inc_u8, iemAImpl_inc_u8_locked;
    601 FNIEMAIMPLUNARYU8 iemAImpl_dec_u8, iemAImpl_dec_u8_locked;
    602 FNIEMAIMPLUNARYU8 iemAImpl_not_u8, iemAImpl_not_u8_locked;
    603 FNIEMAIMPLUNARYU8 iemAImpl_neg_u8, iemAImpl_neg_u8_locked;
    604 /** @} */
    605 
    606 /** @name Arithmetic assignment operations on words (unary).
    607  * @{ */
    608 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU16,  (uint16_t  *pu16Dst,  uint32_t *pEFlags));
    609 typedef FNIEMAIMPLUNARYU16  *PFNIEMAIMPLUNARYU16;
    610 FNIEMAIMPLUNARYU16 iemAImpl_inc_u16, iemAImpl_inc_u16_locked;
    611 FNIEMAIMPLUNARYU16 iemAImpl_dec_u16, iemAImpl_dec_u16_locked;
    612 FNIEMAIMPLUNARYU16 iemAImpl_not_u16, iemAImpl_not_u16_locked;
    613 FNIEMAIMPLUNARYU16 iemAImpl_neg_u16, iemAImpl_neg_u16_locked;
    614 /** @} */
    615 
    616 /** @name Arithmetic assignment operations on double words (unary).
    617  * @{ */
    618 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU32,  (uint32_t  *pu32Dst,  uint32_t *pEFlags));
    619 typedef FNIEMAIMPLUNARYU32  *PFNIEMAIMPLUNARYU32;
    620 FNIEMAIMPLUNARYU32 iemAImpl_inc_u32, iemAImpl_inc_u32_locked;
    621 FNIEMAIMPLUNARYU32 iemAImpl_dec_u32, iemAImpl_dec_u32_locked;
    622 FNIEMAIMPLUNARYU32 iemAImpl_not_u32, iemAImpl_not_u32_locked;
    623 FNIEMAIMPLUNARYU32 iemAImpl_neg_u32, iemAImpl_neg_u32_locked;
    624 /** @} */
    625 
    626 /** @name Arithmetic assignment operations on quad words (unary).
    627  * @{ */
    628 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLUNARYU64,  (uint64_t  *pu64Dst,  uint32_t *pEFlags));
    629 typedef FNIEMAIMPLUNARYU64  *PFNIEMAIMPLUNARYU64;
    630 FNIEMAIMPLUNARYU64 iemAImpl_inc_u64, iemAImpl_inc_u64_locked;
    631 FNIEMAIMPLUNARYU64 iemAImpl_dec_u64, iemAImpl_dec_u64_locked;
    632 FNIEMAIMPLUNARYU64 iemAImpl_not_u64, iemAImpl_not_u64_locked;
    633 FNIEMAIMPLUNARYU64 iemAImpl_neg_u64, iemAImpl_neg_u64_locked;
    634 /** @} */
    635 
    636 
    637 /** @name Shift operations on bytes (Group 2).
    638  * @{ */
    639 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU8,(uint32_t fEFlagsIn, uint8_t *pu8Dst, uint8_t cShift));
    640 typedef FNIEMAIMPLSHIFTU8  *PFNIEMAIMPLSHIFTU8;
    641 FNIEMAIMPLSHIFTU8 iemAImpl_rol_u8, iemAImpl_rol_u8_amd, iemAImpl_rol_u8_intel;
    642 FNIEMAIMPLSHIFTU8 iemAImpl_ror_u8, iemAImpl_ror_u8_amd, iemAImpl_ror_u8_intel;
    643 FNIEMAIMPLSHIFTU8 iemAImpl_rcl_u8, iemAImpl_rcl_u8_amd, iemAImpl_rcl_u8_intel;
    644 FNIEMAIMPLSHIFTU8 iemAImpl_rcr_u8, iemAImpl_rcr_u8_amd, iemAImpl_rcr_u8_intel;
    645 FNIEMAIMPLSHIFTU8 iemAImpl_shl_u8, iemAImpl_shl_u8_amd, iemAImpl_shl_u8_intel;
    646 FNIEMAIMPLSHIFTU8 iemAImpl_shr_u8, iemAImpl_shr_u8_amd, iemAImpl_shr_u8_intel;
    647 FNIEMAIMPLSHIFTU8 iemAImpl_sar_u8, iemAImpl_sar_u8_amd, iemAImpl_sar_u8_intel;
    648 /** @} */
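
/* Sketch: shifts and rotates follow the same flags-in/flags-out pattern, with
 * the (masked) shift count as the last argument:
 *
 *      uint8_t  u8Dst   = 0x81;
 *      uint32_t fEFlags = iemAImpl_rol_u8(X86_EFL_1, &u8Dst, 1);
 *      // u8Dst == 0x03; CF holds the bit that was rotated around.
 */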
    649 
    650 /** @name Shift operations on words (Group 2).
    651  * @{ */
    652 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU16,(uint32_t fEFlagsIn, uint16_t *pu16Dst, uint8_t cShift));
    653 typedef FNIEMAIMPLSHIFTU16  *PFNIEMAIMPLSHIFTU16;
    654 FNIEMAIMPLSHIFTU16 iemAImpl_rol_u16, iemAImpl_rol_u16_amd, iemAImpl_rol_u16_intel;
    655 FNIEMAIMPLSHIFTU16 iemAImpl_ror_u16, iemAImpl_ror_u16_amd, iemAImpl_ror_u16_intel;
    656 FNIEMAIMPLSHIFTU16 iemAImpl_rcl_u16, iemAImpl_rcl_u16_amd, iemAImpl_rcl_u16_intel;
    657 FNIEMAIMPLSHIFTU16 iemAImpl_rcr_u16, iemAImpl_rcr_u16_amd, iemAImpl_rcr_u16_intel;
    658 FNIEMAIMPLSHIFTU16 iemAImpl_shl_u16, iemAImpl_shl_u16_amd, iemAImpl_shl_u16_intel;
    659 FNIEMAIMPLSHIFTU16 iemAImpl_shr_u16, iemAImpl_shr_u16_amd, iemAImpl_shr_u16_intel;
    660 FNIEMAIMPLSHIFTU16 iemAImpl_sar_u16, iemAImpl_sar_u16_amd, iemAImpl_sar_u16_intel;
    661 /** @} */
    662 
    663 /** @name Shift operations on double words (Group 2).
    664  * @{ */
    665 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU32,(uint32_t fEFlagsIn, uint32_t *pu32Dst, uint8_t cShift));
    666 typedef FNIEMAIMPLSHIFTU32  *PFNIEMAIMPLSHIFTU32;
    667 FNIEMAIMPLSHIFTU32 iemAImpl_rol_u32, iemAImpl_rol_u32_amd, iemAImpl_rol_u32_intel;
    668 FNIEMAIMPLSHIFTU32 iemAImpl_ror_u32, iemAImpl_ror_u32_amd, iemAImpl_ror_u32_intel;
    669 FNIEMAIMPLSHIFTU32 iemAImpl_rcl_u32, iemAImpl_rcl_u32_amd, iemAImpl_rcl_u32_intel;
    670 FNIEMAIMPLSHIFTU32 iemAImpl_rcr_u32, iemAImpl_rcr_u32_amd, iemAImpl_rcr_u32_intel;
    671 FNIEMAIMPLSHIFTU32 iemAImpl_shl_u32, iemAImpl_shl_u32_amd, iemAImpl_shl_u32_intel;
    672 FNIEMAIMPLSHIFTU32 iemAImpl_shr_u32, iemAImpl_shr_u32_amd, iemAImpl_shr_u32_intel;
    673 FNIEMAIMPLSHIFTU32 iemAImpl_sar_u32, iemAImpl_sar_u32_amd, iemAImpl_sar_u32_intel;
    674 /** @} */
    675 
    676 /** @name Shift operations on quad words (Group 2).
    677  * @{ */
    678 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSHIFTU64,(uint32_t fEFlagsIn, uint64_t *pu64Dst, uint8_t cShift));
    679 typedef FNIEMAIMPLSHIFTU64  *PFNIEMAIMPLSHIFTU64;
    680 FNIEMAIMPLSHIFTU64 iemAImpl_rol_u64, iemAImpl_rol_u64_amd, iemAImpl_rol_u64_intel;
    681 FNIEMAIMPLSHIFTU64 iemAImpl_ror_u64, iemAImpl_ror_u64_amd, iemAImpl_ror_u64_intel;
    682 FNIEMAIMPLSHIFTU64 iemAImpl_rcl_u64, iemAImpl_rcl_u64_amd, iemAImpl_rcl_u64_intel;
    683 FNIEMAIMPLSHIFTU64 iemAImpl_rcr_u64, iemAImpl_rcr_u64_amd, iemAImpl_rcr_u64_intel;
    684 FNIEMAIMPLSHIFTU64 iemAImpl_shl_u64, iemAImpl_shl_u64_amd, iemAImpl_shl_u64_intel;
    685 FNIEMAIMPLSHIFTU64 iemAImpl_shr_u64, iemAImpl_shr_u64_amd, iemAImpl_shr_u64_intel;
    686 FNIEMAIMPLSHIFTU64 iemAImpl_sar_u64, iemAImpl_sar_u64_amd, iemAImpl_sar_u64_intel;
    687 /** @} */
    688 
    689 /** @name Multiplication and division operations.
    690  * @{ */
    691 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU8,(uint16_t *pu16AX, uint8_t u8FactorDivisor, uint32_t fEFlags));
    692 typedef FNIEMAIMPLMULDIVU8  *PFNIEMAIMPLMULDIVU8;
    693 FNIEMAIMPLMULDIVU8 iemAImpl_mul_u8,  iemAImpl_mul_u8_amd,  iemAImpl_mul_u8_intel;
    694 FNIEMAIMPLMULDIVU8 iemAImpl_imul_u8, iemAImpl_imul_u8_amd, iemAImpl_imul_u8_intel;
    695 FNIEMAIMPLMULDIVU8 iemAImpl_div_u8,  iemAImpl_div_u8_amd,  iemAImpl_div_u8_intel;
    696 FNIEMAIMPLMULDIVU8 iemAImpl_idiv_u8, iemAImpl_idiv_u8_amd, iemAImpl_idiv_u8_intel;
    697 
    698 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU16,(uint16_t *pu16AX, uint16_t *pu16DX, uint16_t u16FactorDivisor, uint32_t fEFlags));
    699 typedef FNIEMAIMPLMULDIVU16  *PFNIEMAIMPLMULDIVU16;
    700 FNIEMAIMPLMULDIVU16 iemAImpl_mul_u16,  iemAImpl_mul_u16_amd,  iemAImpl_mul_u16_intel;
    701 FNIEMAIMPLMULDIVU16 iemAImpl_imul_u16, iemAImpl_imul_u16_amd, iemAImpl_imul_u16_intel;
    702 FNIEMAIMPLMULDIVU16 iemAImpl_div_u16,  iemAImpl_div_u16_amd,  iemAImpl_div_u16_intel;
    703 FNIEMAIMPLMULDIVU16 iemAImpl_idiv_u16, iemAImpl_idiv_u16_amd, iemAImpl_idiv_u16_intel;
    704 
    705 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU32,(uint32_t *pu32EAX, uint32_t *pu32EDX, uint32_t u32FactorDivisor, uint32_t fEFlags));
    706 typedef FNIEMAIMPLMULDIVU32  *PFNIEMAIMPLMULDIVU32;
    707 FNIEMAIMPLMULDIVU32 iemAImpl_mul_u32,  iemAImpl_mul_u32_amd,  iemAImpl_mul_u32_intel;
    708 FNIEMAIMPLMULDIVU32 iemAImpl_imul_u32, iemAImpl_imul_u32_amd, iemAImpl_imul_u32_intel;
    709 FNIEMAIMPLMULDIVU32 iemAImpl_div_u32,  iemAImpl_div_u32_amd,  iemAImpl_div_u32_intel;
    710 FNIEMAIMPLMULDIVU32 iemAImpl_idiv_u32, iemAImpl_idiv_u32_amd, iemAImpl_idiv_u32_intel;
    711 
    712 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMULDIVU64,(uint64_t *pu64RAX, uint64_t *pu64RDX, uint64_t u64FactorDivisor, uint32_t fEFlags));
    713 typedef FNIEMAIMPLMULDIVU64  *PFNIEMAIMPLMULDIVU64;
    714 FNIEMAIMPLMULDIVU64 iemAImpl_mul_u64,  iemAImpl_mul_u64_amd,  iemAImpl_mul_u64_intel;
    715 FNIEMAIMPLMULDIVU64 iemAImpl_imul_u64, iemAImpl_imul_u64_amd, iemAImpl_imul_u64_intel;
    716 FNIEMAIMPLMULDIVU64 iemAImpl_div_u64,  iemAImpl_div_u64_amd,  iemAImpl_div_u64_intel;
    717 FNIEMAIMPLMULDIVU64 iemAImpl_idiv_u64, iemAImpl_idiv_u64_amd, iemAImpl_idiv_u64_intel;
    718 /** @} */
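
/* Sketch of the widening MUL convention: the accumulator (plus DX/EDX/RDX for
 * the wider variants) is both input and output:
 *
 *      uint16_t u16Ax   = 100;                           // AL = 100
 *      uint32_t fEFlags = iemAImpl_mul_u8(&u16Ax, 3, X86_EFL_1);
 *      // u16Ax == 300 (AX = AL * 3); CF/OF set because AH is non-zero.
 */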
    719 
    720 /** @name Byte Swap.
    721  * @{  */
    722 IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u16,(uint32_t *pu32Dst)); /* Yes, 32-bit register access. */
    723 IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u32,(uint32_t *pu32Dst));
    724 IEM_DECL_IMPL_TYPE(void, iemAImpl_bswap_u64,(uint64_t *pu64Dst));
    725 /** @}  */
    726 
    727 /** @name Misc.
    728  * @{ */
    729 FNIEMAIMPLBINU16 iemAImpl_arpl;
    730 /** @} */
    731 
    732 /** @name RDRAND and RDSEED
    733  * @{ */
    734 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU16,(uint16_t *puDst, uint32_t *pEFlags));
    735 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU32,(uint32_t *puDst, uint32_t *pEFlags));
    736 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLRDRANDSEEDU64,(uint64_t *puDst, uint32_t *pEFlags));
    737 typedef FNIEMAIMPLRDRANDSEEDU16  *PFNIEMAIMPLRDRANDSEEDU16;
    738 typedef FNIEMAIMPLRDRANDSEEDU32  *PFNIEMAIMPLRDRANDSEEDU32;
    739 typedef FNIEMAIMPLRDRANDSEEDU64  *PFNIEMAIMPLRDRANDSEEDU64;
    740 
    741 FNIEMAIMPLRDRANDSEEDU16 iemAImpl_rdrand_u16, iemAImpl_rdrand_u16_fallback;
    742 FNIEMAIMPLRDRANDSEEDU32 iemAImpl_rdrand_u32, iemAImpl_rdrand_u32_fallback;
    743 FNIEMAIMPLRDRANDSEEDU64 iemAImpl_rdrand_u64, iemAImpl_rdrand_u64_fallback;
    744 FNIEMAIMPLRDRANDSEEDU16 iemAImpl_rdseed_u16, iemAImpl_rdseed_u16_fallback;
    745 FNIEMAIMPLRDRANDSEEDU32 iemAImpl_rdseed_u32, iemAImpl_rdseed_u32_fallback;
    746 FNIEMAIMPLRDRANDSEEDU64 iemAImpl_rdseed_u64, iemAImpl_rdseed_u64_fallback;
    747 /** @} */
    748 
    749 /** @name ADOX and ADCX
    750  * @{ */
    751 FNIEMAIMPLBINU32 iemAImpl_adcx_u32, iemAImpl_adcx_u32_fallback;
    752 FNIEMAIMPLBINU64 iemAImpl_adcx_u64, iemAImpl_adcx_u64_fallback;
    753 FNIEMAIMPLBINU32 iemAImpl_adox_u32, iemAImpl_adox_u32_fallback;
    754 FNIEMAIMPLBINU64 iemAImpl_adox_u64, iemAImpl_adox_u64_fallback;
    755 /** @} */
    756 
    757 
    758 /**
    759  * An FPU result.
    760  * @note x86 specific
    761  */
    762 typedef struct IEMFPURESULT
    763 {
    764     /** The output value. */
    765     RTFLOAT80U      r80Result;
    766     /** The output status. */
    767     uint16_t        FSW;
    768 } IEMFPURESULT;
    769 AssertCompileMemberOffset(IEMFPURESULT, FSW, 10);
    770 /** Pointer to an FPU result. */
    771 typedef IEMFPURESULT *PIEMFPURESULT;
    772 /** Pointer to a const FPU result. */
    773 typedef IEMFPURESULT const *PCIEMFPURESULT;
    774 
    775 /** @name FPU operations taking a 32-bit float argument
    776  * @{ */
    777 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR32FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
    778                                                       PCRTFLOAT80U pr80Val1, PCRTFLOAT32U pr32Val2));
    779 typedef FNIEMAIMPLFPUR32FSW *PFNIEMAIMPLFPUR32FSW;
    780 
    781 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
    782                                                    PCRTFLOAT80U pr80Val1, PCRTFLOAT32U pr32Val2));
    783 typedef FNIEMAIMPLFPUR32    *PFNIEMAIMPLFPUR32;
    784 
    785 FNIEMAIMPLFPUR32FSW iemAImpl_fcom_r80_by_r32;
    786 FNIEMAIMPLFPUR32    iemAImpl_fadd_r80_by_r32;
    787 FNIEMAIMPLFPUR32    iemAImpl_fmul_r80_by_r32;
    788 FNIEMAIMPLFPUR32    iemAImpl_fsub_r80_by_r32;
    789 FNIEMAIMPLFPUR32    iemAImpl_fsubr_r80_by_r32;
    790 FNIEMAIMPLFPUR32    iemAImpl_fdiv_r80_by_r32;
    791 FNIEMAIMPLFPUR32    iemAImpl_fdivr_r80_by_r32;
    792 
    793 IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT32U pr32Val));
    794 IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r32,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
    795                                                  PRTFLOAT32U pr32Val, PCRTFLOAT80U pr80Val));
    796 /** @} */
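
/* Sketch (r80St0 and r32Src are assumed locals): the FPU helpers take the
 * guest FPU state for the control word and deliver value + status word
 * through IEMFPURESULT:
 *
 *      X86FXSTATE   FxState = {0};
 *      FxState.FCW = 0x037f;                    // x87 power-on control word
 *      IEMFPURESULT Res;
 *      iemAImpl_fadd_r80_by_r32(&FxState, &Res, &r80St0, &r32Src);
 *      // Res.r80Result holds the sum, Res.FSW the resulting status word.
 */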
    797 
    798 /** @name FPU operations taking a 64-bit float argument
    799  * @{ */
    800 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR64FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
    801                                                       PCRTFLOAT80U pr80Val1, PCRTFLOAT64U pr64Val2));
    802 typedef FNIEMAIMPLFPUR64FSW *PFNIEMAIMPLFPUR64FSW;
    803 
    804 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
    805                                                    PCRTFLOAT80U pr80Val1, PCRTFLOAT64U pr64Val2));
    806 typedef FNIEMAIMPLFPUR64   *PFNIEMAIMPLFPUR64;
    807 
    808 FNIEMAIMPLFPUR64FSW iemAImpl_fcom_r80_by_r64;
    809 FNIEMAIMPLFPUR64    iemAImpl_fadd_r80_by_r64;
    810 FNIEMAIMPLFPUR64    iemAImpl_fmul_r80_by_r64;
    811 FNIEMAIMPLFPUR64    iemAImpl_fsub_r80_by_r64;
    812 FNIEMAIMPLFPUR64    iemAImpl_fsubr_r80_by_r64;
    813 FNIEMAIMPLFPUR64    iemAImpl_fdiv_r80_by_r64;
    814 FNIEMAIMPLFPUR64    iemAImpl_fdivr_r80_by_r64;
    815 
    816 IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT64U pr64Val));
    817 IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r64,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
    818                                                  PRTFLOAT64U pr64Val, PCRTFLOAT80U pr80Val));
    819 /** @} */
    820 
    821 /** @name FPU operations taking an 80-bit float argument
    822  * @{ */
    823 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
    824                                                    PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
    825 typedef FNIEMAIMPLFPUR80    *PFNIEMAIMPLFPUR80;
    826 FNIEMAIMPLFPUR80            iemAImpl_fadd_r80_by_r80;
    827 FNIEMAIMPLFPUR80            iemAImpl_fmul_r80_by_r80;
    828 FNIEMAIMPLFPUR80            iemAImpl_fsub_r80_by_r80;
    829 FNIEMAIMPLFPUR80            iemAImpl_fsubr_r80_by_r80;
    830 FNIEMAIMPLFPUR80            iemAImpl_fdiv_r80_by_r80;
    831 FNIEMAIMPLFPUR80            iemAImpl_fdivr_r80_by_r80;
    832 FNIEMAIMPLFPUR80            iemAImpl_fprem_r80_by_r80;
    833 FNIEMAIMPLFPUR80            iemAImpl_fprem1_r80_by_r80;
    834 FNIEMAIMPLFPUR80            iemAImpl_fscale_r80_by_r80;
    835 
    836 FNIEMAIMPLFPUR80            iemAImpl_fpatan_r80_by_r80,  iemAImpl_fpatan_r80_by_r80_amd,  iemAImpl_fpatan_r80_by_r80_intel;
    837 FNIEMAIMPLFPUR80            iemAImpl_fyl2x_r80_by_r80,   iemAImpl_fyl2x_r80_by_r80_amd,   iemAImpl_fyl2x_r80_by_r80_intel;
    838 FNIEMAIMPLFPUR80            iemAImpl_fyl2xp1_r80_by_r80, iemAImpl_fyl2xp1_r80_by_r80_amd, iemAImpl_fyl2xp1_r80_by_r80_intel;
    839 
    840 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
    841                                                       PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
    842 typedef FNIEMAIMPLFPUR80FSW *PFNIEMAIMPLFPUR80FSW;
    843 FNIEMAIMPLFPUR80FSW         iemAImpl_fcom_r80_by_r80;
    844 FNIEMAIMPLFPUR80FSW         iemAImpl_fucom_r80_by_r80;
    845 
    846 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPUR80EFL,(PCX86FXSTATE pFpuState, uint16_t *pu16Fsw,
    847                                                           PCRTFLOAT80U pr80Val1, PCRTFLOAT80U pr80Val2));
    848 typedef FNIEMAIMPLFPUR80EFL *PFNIEMAIMPLFPUR80EFL;
    849 FNIEMAIMPLFPUR80EFL         iemAImpl_fcomi_r80_by_r80;
    850 FNIEMAIMPLFPUR80EFL         iemAImpl_fucomi_r80_by_r80;
    851 
    852 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARY,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT80U pr80Val));
    853 typedef FNIEMAIMPLFPUR80UNARY *PFNIEMAIMPLFPUR80UNARY;
    854 FNIEMAIMPLFPUR80UNARY       iemAImpl_fabs_r80;
    855 FNIEMAIMPLFPUR80UNARY       iemAImpl_fchs_r80;
    856 FNIEMAIMPLFPUR80UNARY       iemAImpl_f2xm1_r80, iemAImpl_f2xm1_r80_amd, iemAImpl_f2xm1_r80_intel;
    857 FNIEMAIMPLFPUR80UNARY       iemAImpl_fsqrt_r80;
    858 FNIEMAIMPLFPUR80UNARY       iemAImpl_frndint_r80;
    859 FNIEMAIMPLFPUR80UNARY       iemAImpl_fsin_r80, iemAImpl_fsin_r80_amd, iemAImpl_fsin_r80_intel;
    860 FNIEMAIMPLFPUR80UNARY       iemAImpl_fcos_r80, iemAImpl_fcos_r80_amd, iemAImpl_fcos_r80_intel;
    861 
    862 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARYFSW,(PCX86FXSTATE pFpuState, uint16_t *pu16Fsw, PCRTFLOAT80U pr80Val));
    863 typedef FNIEMAIMPLFPUR80UNARYFSW *PFNIEMAIMPLFPUR80UNARYFSW;
    864 FNIEMAIMPLFPUR80UNARYFSW    iemAImpl_ftst_r80;
    865 FNIEMAIMPLFPUR80UNARYFSW    iemAImpl_fxam_r80;
    866 
    867 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80LDCONST,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes));
    868 typedef FNIEMAIMPLFPUR80LDCONST *PFNIEMAIMPLFPUR80LDCONST;
    869 FNIEMAIMPLFPUR80LDCONST     iemAImpl_fld1;
    870 FNIEMAIMPLFPUR80LDCONST     iemAImpl_fldl2t;
    871 FNIEMAIMPLFPUR80LDCONST     iemAImpl_fldl2e;
    872 FNIEMAIMPLFPUR80LDCONST     iemAImpl_fldpi;
    873 FNIEMAIMPLFPUR80LDCONST     iemAImpl_fldlg2;
    874 FNIEMAIMPLFPUR80LDCONST     iemAImpl_fldln2;
    875 FNIEMAIMPLFPUR80LDCONST     iemAImpl_fldz;
    876 
    877 /**
    878  * An FPU result consisting of two output values and FSW.
    879  * @note x86 specific
    880  */
    881 typedef struct IEMFPURESULTTWO
    882 {
    883     /** The first output value. */
    884     RTFLOAT80U      r80Result1;
    885     /** The output status. */
    886     uint16_t        FSW;
    887     /** The second output value. */
    888     RTFLOAT80U      r80Result2;
    889 } IEMFPURESULTTWO;
    890 AssertCompileMemberOffset(IEMFPURESULTTWO, FSW, 10);
    891 AssertCompileMemberOffset(IEMFPURESULTTWO, r80Result2, 12);
    892 /** Pointer to an FPU result consisting of two output values and FSW. */
    893 typedef IEMFPURESULTTWO *PIEMFPURESULTTWO;
    894 /** Pointer to a const FPU result consisting of two output values and FSW. */
    895 typedef IEMFPURESULTTWO const *PCIEMFPURESULTTWO;
    896 
    897 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUR80UNARYTWO,(PCX86FXSTATE pFpuState, PIEMFPURESULTTWO pFpuResTwo,
    898                                                            PCRTFLOAT80U pr80Val));
    899 typedef FNIEMAIMPLFPUR80UNARYTWO *PFNIEMAIMPLFPUR80UNARYTWO;
    900 FNIEMAIMPLFPUR80UNARYTWO    iemAImpl_fptan_r80_r80, iemAImpl_fptan_r80_r80_amd, iemAImpl_fptan_r80_r80_intel;
    901 FNIEMAIMPLFPUR80UNARYTWO    iemAImpl_fxtract_r80_r80;
    902 FNIEMAIMPLFPUR80UNARYTWO    iemAImpl_fsincos_r80_r80, iemAImpl_fsincos_r80_r80_amd, iemAImpl_fsincos_r80_r80_intel;
    903 
    904 IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_r80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTFLOAT80U pr80Val));
    905 IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_r80,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
    906                                                  PRTFLOAT80U pr80Dst, PCRTFLOAT80U pr80Src));
    907 
    908 IEM_DECL_IMPL_DEF(void, iemAImpl_fld_r80_from_d80,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, PCRTPBCD80U pd80Val));
    909 IEM_DECL_IMPL_DEF(void, iemAImpl_fst_r80_to_d80,(PCX86FXSTATE pFpuState, uint16_t *pu16FSW,
    910                                                  PRTPBCD80U pd80Dst, PCRTFLOAT80U pr80Src));
    911 
    912 /** @} */
    913 
    914 /** @name FPU operations taking a 16-bit signed integer argument
    915  * @{  */
    916 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI16,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
    917                                                    PCRTFLOAT80U pr80Val1, int16_t const *pi16Val2));
    918 typedef FNIEMAIMPLFPUI16 *PFNIEMAIMPLFPUI16;
    919 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI16,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
    920                                                           int16_t *pi16Dst, PCRTFLOAT80U pr80Src));
    921 typedef FNIEMAIMPLFPUSTR80TOI16 *PFNIEMAIMPLFPUSTR80TOI16;
    922 
    923 FNIEMAIMPLFPUI16    iemAImpl_fiadd_r80_by_i16;
    924 FNIEMAIMPLFPUI16    iemAImpl_fimul_r80_by_i16;
    925 FNIEMAIMPLFPUI16    iemAImpl_fisub_r80_by_i16;
    926 FNIEMAIMPLFPUI16    iemAImpl_fisubr_r80_by_i16;
    927 FNIEMAIMPLFPUI16    iemAImpl_fidiv_r80_by_i16;
    928 FNIEMAIMPLFPUI16    iemAImpl_fidivr_r80_by_i16;
    929 
    930 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI16FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
    931                                                       PCRTFLOAT80U pr80Val1, int16_t const *pi16Val2));
    932 typedef FNIEMAIMPLFPUI16FSW *PFNIEMAIMPLFPUI16FSW;
    933 FNIEMAIMPLFPUI16FSW     iemAImpl_ficom_r80_by_i16;
    934 
    935 IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i16,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int16_t const *pi16Val));
    936 FNIEMAIMPLFPUSTR80TOI16 iemAImpl_fist_r80_to_i16;
    937 FNIEMAIMPLFPUSTR80TOI16 iemAImpl_fistt_r80_to_i16, iemAImpl_fistt_r80_to_i16_amd, iemAImpl_fistt_r80_to_i16_intel;
    938 /** @}  */
    939 
    940 /** @name FPU operations taking a 32-bit signed integer argument
    941  * @{  */
    942 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes,
    943                                                    PCRTFLOAT80U pr80Val1, int32_t const *pi32Val2));
    944 typedef FNIEMAIMPLFPUI32 *PFNIEMAIMPLFPUI32;
    945 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI32,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
    946                                                           int32_t *pi32Dst, PCRTFLOAT80U pr80Src));
    947 typedef FNIEMAIMPLFPUSTR80TOI32 *PFNIEMAIMPLFPUSTR80TOI32;
    948 
    949 FNIEMAIMPLFPUI32    iemAImpl_fiadd_r80_by_i32;
    950 FNIEMAIMPLFPUI32    iemAImpl_fimul_r80_by_i32;
    951 FNIEMAIMPLFPUI32    iemAImpl_fisub_r80_by_i32;
    952 FNIEMAIMPLFPUI32    iemAImpl_fisubr_r80_by_i32;
    953 FNIEMAIMPLFPUI32    iemAImpl_fidiv_r80_by_i32;
    954 FNIEMAIMPLFPUI32    iemAImpl_fidivr_r80_by_i32;
    955 
    956 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUI32FSW,(PCX86FXSTATE pFpuState, uint16_t *pFSW,
    957                                                       PCRTFLOAT80U pr80Val1, int32_t const *pi32Val2));
    958 typedef FNIEMAIMPLFPUI32FSW *PFNIEMAIMPLFPUI32FSW;
    959 FNIEMAIMPLFPUI32FSW     iemAImpl_ficom_r80_by_i32;
    960 
    961 IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i32,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int32_t const *pi32Val));
    962 FNIEMAIMPLFPUSTR80TOI32 iemAImpl_fist_r80_to_i32;
    963 FNIEMAIMPLFPUSTR80TOI32 iemAImpl_fistt_r80_to_i32;
    964 /** @}  */
    965 
    966 /** @name FPU operations taking a 64-bit signed integer argument
    967  * @{  */
    968 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLFPUSTR80TOI64,(PCX86FXSTATE pFpuState, uint16_t *pFpuRes,
    969                                                           int64_t *pi64Dst, PCRTFLOAT80U pr80Src));
    970 typedef FNIEMAIMPLFPUSTR80TOI64 *PFNIEMAIMPLFPUSTR80TOI64;
    971 
    972 IEM_DECL_IMPL_DEF(void, iemAImpl_fild_r80_from_i64,(PCX86FXSTATE pFpuState, PIEMFPURESULT pFpuRes, int64_t const *pi64Val));
    973 FNIEMAIMPLFPUSTR80TOI64 iemAImpl_fist_r80_to_i64;
    974 FNIEMAIMPLFPUSTR80TOI64 iemAImpl_fistt_r80_to_i64;
    975 /** @} */
    976 
    977 
    978 /** Temporary type representing a 256-bit vector register. */
    979 typedef struct { uint64_t au64[4]; } IEMVMM256;
    980 /** Temporary type pointing to a 256-bit vector register. */
    981 typedef IEMVMM256 *PIEMVMM256;
    982 /** Temporary type pointing to a const 256-bit vector register. */
    983 typedef IEMVMM256 const *PCIEMVMM256;
    984 
    985 
    986 /** @name Media (SSE/MMX/AVX) operations: full1 + full2 -> full1.
    987  * @{ */
    988 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAF2U64,(PCX86FXSTATE pFpuState, uint64_t *puDst, uint64_t const *puSrc));
    989 typedef FNIEMAIMPLMEDIAF2U64   *PFNIEMAIMPLMEDIAF2U64;
    990 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
    991 typedef FNIEMAIMPLMEDIAF2U128  *PFNIEMAIMPLMEDIAF2U128;
    992 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U256,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc));
    993 typedef FNIEMAIMPLMEDIAF2U256  *PFNIEMAIMPLMEDIAF2U256;
    994 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3U128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
    995 typedef FNIEMAIMPLMEDIAF3U128  *PFNIEMAIMPLMEDIAF3U128;
    996 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3U256,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc1, PCX86YMMREG puSrc2));
    997 typedef FNIEMAIMPLMEDIAF3U256  *PFNIEMAIMPLMEDIAF3U256;
    998 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U64,(uint64_t *puDst, uint64_t const *puSrc));
    999 typedef FNIEMAIMPLMEDIAOPTF2U64   *PFNIEMAIMPLMEDIAOPTF2U64;
    1000 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U128,(PRTUINT128U puDst, PCRTUINT128U puSrc));
    1001 typedef FNIEMAIMPLMEDIAOPTF2U128  *PFNIEMAIMPLMEDIAOPTF2U128;
    1002 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2));
    1003 typedef FNIEMAIMPLMEDIAOPTF3U128  *PFNIEMAIMPLMEDIAOPTF3U128;
    1004 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2));
    1005 typedef FNIEMAIMPLMEDIAOPTF3U256  *PFNIEMAIMPLMEDIAOPTF3U256;
    1006 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U256,(PRTUINT256U puDst, PCRTUINT256U puSrc));
    1007 typedef FNIEMAIMPLMEDIAOPTF2U256  *PFNIEMAIMPLMEDIAOPTF2U256;
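
/* Sketch: the "OPT" media forms carry no FPU/MXCSR state at all; e.g. PXOR on
 * a 128-bit operand simply combines the two values in place:
 *
 *      RTUINT128U uDst, uSrc;
 *      uDst.s.Hi = uDst.s.Lo = UINT64_MAX;
 *      uSrc = uDst;
 *      iemAImpl_pxor_u128(&uDst, &uSrc);        // uDst is now all zero
 */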
    1008 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pshufb_u64, iemAImpl_pshufb_u64_fallback;
    1009 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pand_u64, iemAImpl_pandn_u64, iemAImpl_por_u64, iemAImpl_pxor_u64;
    1010 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pcmpeqb_u64,  iemAImpl_pcmpeqw_u64,  iemAImpl_pcmpeqd_u64;
    1011 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pcmpgtb_u64,  iemAImpl_pcmpgtw_u64,  iemAImpl_pcmpgtd_u64;
    1012 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_paddb_u64, iemAImpl_paddsb_u64, iemAImpl_paddusb_u64;
    1013 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_paddw_u64, iemAImpl_paddsw_u64, iemAImpl_paddusw_u64;
    1014 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_paddd_u64;
    1015 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_paddq_u64;
    1016 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_psubb_u64, iemAImpl_psubsb_u64, iemAImpl_psubusb_u64;
    1017 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_psubw_u64, iemAImpl_psubsw_u64, iemAImpl_psubusw_u64;
    1018 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_psubd_u64;
    1019 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_psubq_u64;
    1020 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pmaddwd_u64, iemAImpl_pmaddwd_u64_fallback;
    1021 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pmullw_u64, iemAImpl_pmulhw_u64;
    1022 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pminub_u64, iemAImpl_pmaxub_u64;
    1023 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pminsw_u64, iemAImpl_pmaxsw_u64;
    1024 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pabsb_u64, iemAImpl_pabsb_u64_fallback;
    1025 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pabsw_u64, iemAImpl_pabsw_u64_fallback;
    1026 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pabsd_u64, iemAImpl_pabsd_u64_fallback;
    1027 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_psignb_u64, iemAImpl_psignb_u64_fallback;
    1028 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_psignw_u64, iemAImpl_psignw_u64_fallback;
    1029 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_psignd_u64, iemAImpl_psignd_u64_fallback;
    1030 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_phaddw_u64, iemAImpl_phaddw_u64_fallback;
    1031 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_phaddd_u64, iemAImpl_phaddd_u64_fallback;
    1032 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_phsubw_u64, iemAImpl_phsubw_u64_fallback;
    1033 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_phsubd_u64, iemAImpl_phsubd_u64_fallback;
    1034 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_phaddsw_u64, iemAImpl_phaddsw_u64_fallback;
    1035 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_phsubsw_u64, iemAImpl_phsubsw_u64_fallback;
    1036 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pmaddubsw_u64, iemAImpl_pmaddubsw_u64_fallback;
    1037 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pmulhrsw_u64, iemAImpl_pmulhrsw_u64_fallback;
    1038 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pmuludq_u64;
    1039 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_psllw_u64, iemAImpl_psrlw_u64, iemAImpl_psraw_u64;
    1040 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pslld_u64, iemAImpl_psrld_u64, iemAImpl_psrad_u64;
    1041 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_psllq_u64, iemAImpl_psrlq_u64;
    1042 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_packsswb_u64, iemAImpl_packuswb_u64;
    1043 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_packssdw_u64;
    1044 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pmulhuw_u64;
    1045 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_pavgb_u64, iemAImpl_pavgw_u64;
    1046 FNIEMAIMPLMEDIAOPTF2U64  iemAImpl_psadbw_u64;
    1047 
    1048 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pshufb_u128, iemAImpl_pshufb_u128_fallback;
    1049 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pand_u128, iemAImpl_pandn_u128, iemAImpl_por_u128, iemAImpl_pxor_u128;
    1050 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpeqb_u128, iemAImpl_pcmpeqw_u128, iemAImpl_pcmpeqd_u128;
    1051 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpeqq_u128, iemAImpl_pcmpeqq_u128_fallback;
    1052 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpgtb_u128, iemAImpl_pcmpgtw_u128, iemAImpl_pcmpgtd_u128;
    1053 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pcmpgtq_u128, iemAImpl_pcmpgtq_u128_fallback;
    1054 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddb_u128, iemAImpl_paddsb_u128, iemAImpl_paddusb_u128;
    1055 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddw_u128, iemAImpl_paddsw_u128, iemAImpl_paddusw_u128;
    1056 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddd_u128;
    1057 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_paddq_u128;
    1058 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubb_u128, iemAImpl_psubsb_u128, iemAImpl_psubusb_u128;
    1059 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubw_u128, iemAImpl_psubsw_u128, iemAImpl_psubusw_u128;
    1060 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubd_u128;
    1061 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psubq_u128;
    1062 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmullw_u128, iemAImpl_pmullw_u128_fallback;
    1063 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhw_u128;
    1064 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulld_u128, iemAImpl_pmulld_u128_fallback;
    1065 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaddwd_u128, iemAImpl_pmaddwd_u128_fallback;
    1066 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminub_u128;
    1067 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminud_u128, iemAImpl_pminud_u128_fallback;
    1068 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminuw_u128, iemAImpl_pminuw_u128_fallback;
    1069 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsb_u128, iemAImpl_pminsb_u128_fallback;
    1070 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsd_u128, iemAImpl_pminsd_u128_fallback;
    1071 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pminsw_u128, iemAImpl_pminsw_u128_fallback;
    1072 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxub_u128;
    1073 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxud_u128, iemAImpl_pmaxud_u128_fallback;
    1074 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxuw_u128, iemAImpl_pmaxuw_u128_fallback;
    1075 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsb_u128, iemAImpl_pmaxsb_u128_fallback;
    1076 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsw_u128;
    1077 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaxsd_u128, iemAImpl_pmaxsd_u128_fallback;
    1078 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsb_u128, iemAImpl_pabsb_u128_fallback;
    1079 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsw_u128, iemAImpl_pabsw_u128_fallback;
    1080 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pabsd_u128, iemAImpl_pabsd_u128_fallback;
    1081 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignb_u128, iemAImpl_psignb_u128_fallback;
    1082 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignw_u128, iemAImpl_psignw_u128_fallback;
    1083 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psignd_u128, iemAImpl_psignd_u128_fallback;
    1084 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddw_u128, iemAImpl_phaddw_u128_fallback;
    1085 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddd_u128, iemAImpl_phaddd_u128_fallback;
    1086 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubw_u128, iemAImpl_phsubw_u128_fallback;
    1087 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubd_u128, iemAImpl_phsubd_u128_fallback;
    1088 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phaddsw_u128, iemAImpl_phaddsw_u128_fallback;
    1089 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phsubsw_u128, iemAImpl_phsubsw_u128_fallback;
    1090 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmaddubsw_u128, iemAImpl_pmaddubsw_u128_fallback;
    1091 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhrsw_u128, iemAImpl_pmulhrsw_u128_fallback;
    1092 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmuludq_u128;
    1094 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_packsswb_u128, iemAImpl_packuswb_u128;
    1095 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_packssdw_u128, iemAImpl_packusdw_u128;
    1096 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psllw_u128, iemAImpl_psrlw_u128, iemAImpl_psraw_u128;
    1097 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pslld_u128, iemAImpl_psrld_u128, iemAImpl_psrad_u128;
    1098 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psllq_u128, iemAImpl_psrlq_u128;
    1099 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmulhuw_u128;
    1100 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pavgb_u128, iemAImpl_pavgw_u128;
    1101 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_psadbw_u128;
    1102 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_pmuldq_u128, iemAImpl_pmuldq_u128_fallback;
    1103 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_unpcklps_u128, iemAImpl_unpcklpd_u128;
    1104 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_unpckhps_u128, iemAImpl_unpckhpd_u128;
    1105 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_phminposuw_u128, iemAImpl_phminposuw_u128_fallback;
    1106 
    1107 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpshufb_u128,    iemAImpl_vpshufb_u128_fallback;
    1108 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpand_u128,      iemAImpl_vpand_u128_fallback;
    1109 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpandn_u128,     iemAImpl_vpandn_u128_fallback;
    1110 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpor_u128,       iemAImpl_vpor_u128_fallback;
    1111 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpxor_u128,      iemAImpl_vpxor_u128_fallback;
    1112 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqb_u128,   iemAImpl_vpcmpeqb_u128_fallback;
    1113 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqw_u128,   iemAImpl_vpcmpeqw_u128_fallback;
    1114 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqd_u128,   iemAImpl_vpcmpeqd_u128_fallback;
    1115 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpeqq_u128,   iemAImpl_vpcmpeqq_u128_fallback;
    1116 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtb_u128,   iemAImpl_vpcmpgtb_u128_fallback;
    1117 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtw_u128,   iemAImpl_vpcmpgtw_u128_fallback;
    1118 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtd_u128,   iemAImpl_vpcmpgtd_u128_fallback;
    1119 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpcmpgtq_u128,   iemAImpl_vpcmpgtq_u128_fallback;
    1120 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddb_u128,     iemAImpl_vpaddb_u128_fallback;
    1121 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddw_u128,     iemAImpl_vpaddw_u128_fallback;
    1122 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddd_u128,     iemAImpl_vpaddd_u128_fallback;
    1123 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddq_u128,     iemAImpl_vpaddq_u128_fallback;
    1124 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubb_u128,     iemAImpl_vpsubb_u128_fallback;
    1125 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubw_u128,     iemAImpl_vpsubw_u128_fallback;
    1126 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubd_u128,     iemAImpl_vpsubd_u128_fallback;
    1127 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubq_u128,     iemAImpl_vpsubq_u128_fallback;
    1128 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminub_u128,    iemAImpl_vpminub_u128_fallback;
    1129 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminuw_u128,    iemAImpl_vpminuw_u128_fallback;
    1130 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminud_u128,    iemAImpl_vpminud_u128_fallback;
    1131 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsb_u128,    iemAImpl_vpminsb_u128_fallback;
    1132 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsw_u128,    iemAImpl_vpminsw_u128_fallback;
    1133 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpminsd_u128,    iemAImpl_vpminsd_u128_fallback;
    1134 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxub_u128,    iemAImpl_vpmaxub_u128_fallback;
    1135 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxuw_u128,    iemAImpl_vpmaxuw_u128_fallback;
    1136 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxud_u128,    iemAImpl_vpmaxud_u128_fallback;
    1137 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsb_u128,    iemAImpl_vpmaxsb_u128_fallback;
    1138 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsw_u128,    iemAImpl_vpmaxsw_u128_fallback;
    1139 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaxsd_u128,    iemAImpl_vpmaxsd_u128_fallback;
    1140 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpacksswb_u128,  iemAImpl_vpacksswb_u128_fallback;
    1141 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackssdw_u128,  iemAImpl_vpackssdw_u128_fallback;
    1142 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackuswb_u128,  iemAImpl_vpackuswb_u128_fallback;
    1143 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpackusdw_u128,  iemAImpl_vpackusdw_u128_fallback;
    1144 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmullw_u128,    iemAImpl_vpmullw_u128_fallback;
    1145 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulld_u128,    iemAImpl_vpmulld_u128_fallback;
    1146 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhw_u128,    iemAImpl_vpmulhw_u128_fallback;
    1147 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhuw_u128,   iemAImpl_vpmulhuw_u128_fallback;
    1148 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpavgb_u128,     iemAImpl_vpavgb_u128_fallback;
    1149 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpavgw_u128,     iemAImpl_vpavgw_u128_fallback;
    1150 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignb_u128,    iemAImpl_vpsignb_u128_fallback;
    1151 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignw_u128,    iemAImpl_vpsignw_u128_fallback;
    1152 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsignd_u128,    iemAImpl_vpsignd_u128_fallback;
    1153 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddw_u128,    iemAImpl_vphaddw_u128_fallback;
    1154 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddd_u128,    iemAImpl_vphaddd_u128_fallback;
    1155 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubw_u128,    iemAImpl_vphsubw_u128_fallback;
    1156 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubd_u128,    iemAImpl_vphsubd_u128_fallback;
    1157 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphaddsw_u128,   iemAImpl_vphaddsw_u128_fallback;
    1158 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vphsubsw_u128,   iemAImpl_vphsubsw_u128_fallback;
    1159 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaddubsw_u128, iemAImpl_vpmaddubsw_u128_fallback;
    1160 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmulhrsw_u128,  iemAImpl_vpmulhrsw_u128_fallback;
    1161 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsadbw_u128,    iemAImpl_vpsadbw_u128_fallback;
    1162 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmuldq_u128,    iemAImpl_vpmuldq_u128_fallback;
    1163 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmuludq_u128,   iemAImpl_vpmuludq_u128_fallback;
    1164 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubsb_u128,    iemAImpl_vpsubsb_u128_fallback;
    1165 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubsw_u128,    iemAImpl_vpsubsw_u128_fallback;
    1166 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubusb_u128,   iemAImpl_vpsubusb_u128_fallback;
    1167 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsubusw_u128,   iemAImpl_vpsubusw_u128_fallback;
    1168 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddusb_u128,   iemAImpl_vpaddusb_u128_fallback;
    1169 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddusw_u128,   iemAImpl_vpaddusw_u128_fallback;
    1170 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddsb_u128,    iemAImpl_vpaddsb_u128_fallback;
    1171 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpaddsw_u128,    iemAImpl_vpaddsw_u128_fallback;
    1172 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllw_u128,     iemAImpl_vpsllw_u128_fallback;
    1173 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpslld_u128,     iemAImpl_vpslld_u128_fallback;
    1174 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsllq_u128,     iemAImpl_vpsllq_u128_fallback;
    1175 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsraw_u128,     iemAImpl_vpsraw_u128_fallback;
    1176 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrad_u128,     iemAImpl_vpsrad_u128_fallback;
    1177 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlw_u128,     iemAImpl_vpsrlw_u128_fallback;
    1178 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrld_u128,     iemAImpl_vpsrld_u128_fallback;
    1179 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpsrlq_u128,     iemAImpl_vpsrlq_u128_fallback;
    1180 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vpmaddwd_u128,   iemAImpl_vpmaddwd_u128_fallback;
    1181 
    1182 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsb_u128,     iemAImpl_vpabsb_u128_fallback;
    1183 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsw_u128,     iemAImpl_vpabsw_u128_fallback;
    1184 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vpabsd_u128,     iemAImpl_vpabsd_u128_fallback;
    1185 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vphminposuw_u128, iemAImpl_vphminposuw_u128_fallback;
    1186 
    1187 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpshufb_u256,    iemAImpl_vpshufb_u256_fallback;
    1188 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpand_u256,      iemAImpl_vpand_u256_fallback;
    1189 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpandn_u256,     iemAImpl_vpandn_u256_fallback;
    1190 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpor_u256,       iemAImpl_vpor_u256_fallback;
    1191 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpxor_u256,      iemAImpl_vpxor_u256_fallback;
    1192 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqb_u256,   iemAImpl_vpcmpeqb_u256_fallback;
    1193 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqw_u256,   iemAImpl_vpcmpeqw_u256_fallback;
    1194 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqd_u256,   iemAImpl_vpcmpeqd_u256_fallback;
    1195 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpeqq_u256,   iemAImpl_vpcmpeqq_u256_fallback;
    1196 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtb_u256,   iemAImpl_vpcmpgtb_u256_fallback;
    1197 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtw_u256,   iemAImpl_vpcmpgtw_u256_fallback;
    1198 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtd_u256,   iemAImpl_vpcmpgtd_u256_fallback;
    1199 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpcmpgtq_u256,   iemAImpl_vpcmpgtq_u256_fallback;
    1200 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddb_u256,     iemAImpl_vpaddb_u256_fallback;
    1201 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddw_u256,     iemAImpl_vpaddw_u256_fallback;
    1202 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddd_u256,     iemAImpl_vpaddd_u256_fallback;
    1203 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddq_u256,     iemAImpl_vpaddq_u256_fallback;
    1204 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubb_u256,     iemAImpl_vpsubb_u256_fallback;
    1205 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubw_u256,     iemAImpl_vpsubw_u256_fallback;
    1206 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubd_u256,     iemAImpl_vpsubd_u256_fallback;
    1207 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubq_u256,     iemAImpl_vpsubq_u256_fallback;
    1208 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminub_u256,    iemAImpl_vpminub_u256_fallback;
    1209 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminuw_u256,    iemAImpl_vpminuw_u256_fallback;
    1210 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminud_u256,    iemAImpl_vpminud_u256_fallback;
    1211 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsb_u256,    iemAImpl_vpminsb_u256_fallback;
    1212 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsw_u256,    iemAImpl_vpminsw_u256_fallback;
    1213 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpminsd_u256,    iemAImpl_vpminsd_u256_fallback;
    1214 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxub_u256,    iemAImpl_vpmaxub_u256_fallback;
    1215 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxuw_u256,    iemAImpl_vpmaxuw_u256_fallback;
    1216 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxud_u256,    iemAImpl_vpmaxud_u256_fallback;
    1217 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsb_u256,    iemAImpl_vpmaxsb_u256_fallback;
    1218 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsw_u256,    iemAImpl_vpmaxsw_u256_fallback;
    1219 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaxsd_u256,    iemAImpl_vpmaxsd_u256_fallback;
    1220 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpacksswb_u256,  iemAImpl_vpacksswb_u256_fallback;
    1221 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackssdw_u256,  iemAImpl_vpackssdw_u256_fallback;
    1222 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackuswb_u256,  iemAImpl_vpackuswb_u256_fallback;
    1223 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpackusdw_u256,  iemAImpl_vpackusdw_u256_fallback;
    1224 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmullw_u256,    iemAImpl_vpmullw_u256_fallback;
    1225 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulld_u256,    iemAImpl_vpmulld_u256_fallback;
    1226 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhw_u256,    iemAImpl_vpmulhw_u256_fallback;
    1227 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhuw_u256,   iemAImpl_vpmulhuw_u256_fallback;
    1228 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpavgb_u256,     iemAImpl_vpavgb_u256_fallback;
    1229 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpavgw_u256,     iemAImpl_vpavgw_u256_fallback;
    1230 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignb_u256,    iemAImpl_vpsignb_u256_fallback;
    1231 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignw_u256,    iemAImpl_vpsignw_u256_fallback;
    1232 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsignd_u256,    iemAImpl_vpsignd_u256_fallback;
    1233 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddw_u256,    iemAImpl_vphaddw_u256_fallback;
    1234 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddd_u256,    iemAImpl_vphaddd_u256_fallback;
    1235 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubw_u256,    iemAImpl_vphsubw_u256_fallback;
    1236 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubd_u256,    iemAImpl_vphsubd_u256_fallback;
    1237 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphaddsw_u256,   iemAImpl_vphaddsw_u256_fallback;
    1238 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vphsubsw_u256,   iemAImpl_vphsubsw_u256_fallback;
    1239 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaddubsw_u256, iemAImpl_vpmaddubsw_u256_fallback;
    1240 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmulhrsw_u256,  iemAImpl_vpmulhrsw_u256_fallback;
    1241 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsadbw_u256,    iemAImpl_vpsadbw_u256_fallback;
    1242 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmuldq_u256,    iemAImpl_vpmuldq_u256_fallback;
    1243 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmuludq_u256,   iemAImpl_vpmuludq_u256_fallback;
    1244 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubsb_u256,    iemAImpl_vpsubsb_u256_fallback;
    1245 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubsw_u256,    iemAImpl_vpsubsw_u256_fallback;
    1246 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubusb_u256,   iemAImpl_vpsubusb_u256_fallback;
    1247 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsubusw_u256,   iemAImpl_vpsubusw_u256_fallback;
    1248 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddusb_u256,   iemAImpl_vpaddusb_u256_fallback;
    1249 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddusw_u256,   iemAImpl_vpaddusw_u256_fallback;
    1250 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddsb_u256,    iemAImpl_vpaddsb_u256_fallback;
    1251 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpaddsw_u256,    iemAImpl_vpaddsw_u256_fallback;
    1252 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllw_u256,     iemAImpl_vpsllw_u256_fallback;
    1253 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpslld_u256,     iemAImpl_vpslld_u256_fallback;
    1254 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsllq_u256,     iemAImpl_vpsllq_u256_fallback;
    1255 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsraw_u256,     iemAImpl_vpsraw_u256_fallback;
    1256 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrad_u256,     iemAImpl_vpsrad_u256_fallback;
    1257 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlw_u256,     iemAImpl_vpsrlw_u256_fallback;
    1258 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrld_u256,     iemAImpl_vpsrld_u256_fallback;
    1259 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpsrlq_u256,     iemAImpl_vpsrlq_u256_fallback;
    1260 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpmaddwd_u256,   iemAImpl_vpmaddwd_u256_fallback;
    1261 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermps_u256,    iemAImpl_vpermps_u256_fallback;
    1262 FNIEMAIMPLMEDIAOPTF3U256 iemAImpl_vpermd_u256,     iemAImpl_vpermd_u256_fallback;
    1263 
    1264 FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsb_u256,     iemAImpl_vpabsb_u256_fallback;
    1265 FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsw_u256,     iemAImpl_vpabsw_u256_fallback;
    1266 FNIEMAIMPLMEDIAOPTF2U256 iemAImpl_vpabsd_u256,     iemAImpl_vpabsd_u256_fallback;
    1267 /** @} */
    1268 
    1269 /** @name Media (SSE/MMX/AVX) operations: lowhalf1 + lowhalf1 -> full1.
    1270  * @{ */
    1271 FNIEMAIMPLMEDIAOPTF2U64   iemAImpl_punpcklbw_u64,  iemAImpl_punpcklwd_u64,  iemAImpl_punpckldq_u64;
    1272 FNIEMAIMPLMEDIAOPTF2U128  iemAImpl_punpcklbw_u128, iemAImpl_punpcklwd_u128, iemAImpl_punpckldq_u128, iemAImpl_punpcklqdq_u128;
    1273 FNIEMAIMPLMEDIAOPTF3U128  iemAImpl_vpunpcklbw_u128,  iemAImpl_vpunpcklbw_u128_fallback,
    1274                           iemAImpl_vpunpcklwd_u128,  iemAImpl_vpunpcklwd_u128_fallback,
    1275                           iemAImpl_vpunpckldq_u128,  iemAImpl_vpunpckldq_u128_fallback,
    1276                           iemAImpl_vpunpcklqdq_u128, iemAImpl_vpunpcklqdq_u128_fallback,
    1277                           iemAImpl_vunpcklps_u128, iemAImpl_vunpcklps_u128_fallback,
    1278                           iemAImpl_vunpcklpd_u128, iemAImpl_vunpcklpd_u128_fallback,
    1279                           iemAImpl_vunpckhps_u128, iemAImpl_vunpckhps_u128_fallback,
    1280                           iemAImpl_vunpckhpd_u128, iemAImpl_vunpckhpd_u128_fallback;
    1281 
    1282 FNIEMAIMPLMEDIAOPTF3U256  iemAImpl_vpunpcklbw_u256,  iemAImpl_vpunpcklbw_u256_fallback,
    1283                           iemAImpl_vpunpcklwd_u256,  iemAImpl_vpunpcklwd_u256_fallback,
    1284                           iemAImpl_vpunpckldq_u256,  iemAImpl_vpunpckldq_u256_fallback,
    1285                           iemAImpl_vpunpcklqdq_u256, iemAImpl_vpunpcklqdq_u256_fallback,
    1286                           iemAImpl_vunpcklps_u256, iemAImpl_vunpcklps_u256_fallback,
    1287                           iemAImpl_vunpcklpd_u256, iemAImpl_vunpcklpd_u256_fallback,
    1288                           iemAImpl_vunpckhps_u256, iemAImpl_vunpckhps_u256_fallback,
    1289                           iemAImpl_vunpckhpd_u256, iemAImpl_vunpckhpd_u256_fallback;
    1290 /** @} */
    1291 
    1292 /** @name Media (SSE/MMX/AVX) operations: hihalf1 + hihalf2 -> full1.
    1293  * @{ */
    1294 FNIEMAIMPLMEDIAOPTF2U64   iemAImpl_punpckhbw_u64,  iemAImpl_punpckhwd_u64,  iemAImpl_punpckhdq_u64;
    1295 FNIEMAIMPLMEDIAOPTF2U128  iemAImpl_punpckhbw_u128, iemAImpl_punpckhwd_u128, iemAImpl_punpckhdq_u128, iemAImpl_punpckhqdq_u128;
    1296 FNIEMAIMPLMEDIAOPTF3U128  iemAImpl_vpunpckhbw_u128,  iemAImpl_vpunpckhbw_u128_fallback,
    1297                           iemAImpl_vpunpckhwd_u128,  iemAImpl_vpunpckhwd_u128_fallback,
    1298                           iemAImpl_vpunpckhdq_u128,  iemAImpl_vpunpckhdq_u128_fallback,
    1299                           iemAImpl_vpunpckhqdq_u128, iemAImpl_vpunpckhqdq_u128_fallback;
    1300 FNIEMAIMPLMEDIAOPTF3U256  iemAImpl_vpunpckhbw_u256,  iemAImpl_vpunpckhbw_u256_fallback,
    1301                           iemAImpl_vpunpckhwd_u256,  iemAImpl_vpunpckhwd_u256_fallback,
    1302                           iemAImpl_vpunpckhdq_u256,  iemAImpl_vpunpckhdq_u256_fallback,
    1303                           iemAImpl_vpunpckhqdq_u256, iemAImpl_vpunpckhqdq_u256_fallback;
    1304 /** @} */
    1305 
    1306 /** @name Media (SSE/MMX/AVX) operation: Packed Shuffle Stuff (evil)
    1307  * @{ */
    1308 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUFU128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
    1309 typedef FNIEMAIMPLMEDIAPSHUFU128 *PFNIEMAIMPLMEDIAPSHUFU128;
    1310 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHUFU256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil));
    1311 typedef FNIEMAIMPLMEDIAPSHUFU256 *PFNIEMAIMPLMEDIAPSHUFU256;
    1312 IEM_DECL_IMPL_DEF(void, iemAImpl_pshufw_u64,(uint64_t *puDst, uint64_t const *puSrc, uint8_t bEvil));
    1313 FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_pshufhw_u128, iemAImpl_pshuflw_u128, iemAImpl_pshufd_u128;
    1314 #ifndef IEM_WITHOUT_ASSEMBLY
    1315 FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpshufhw_u256, iemAImpl_vpshuflw_u256, iemAImpl_vpshufd_u256;
    1316 #endif
    1317 FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpshufhw_u256_fallback, iemAImpl_vpshuflw_u256_fallback, iemAImpl_vpshufd_u256_fallback;
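/* Editorial sketch, not part of the changeset: the 'bEvil' byte here is an
   ordinary imm8 selector, each 2-bit field of it picking one source dword.
   A plain C fallback matching the FNIEMAIMPLMEDIAPSHUFU128 shape above could
   look roughly like this (name hypothetical): */
static void sketchPshufdU128(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil)
{
    RTUINT128U const uSrc = *puSrc;      /* copy first, puDst may alias puSrc */
    puDst->au32[0] = uSrc.au32[ bEvil       & 3];
    puDst->au32[1] = uSrc.au32[(bEvil >> 2) & 3];
    puDst->au32[2] = uSrc.au32[(bEvil >> 4) & 3];
    puDst->au32[3] = uSrc.au32[(bEvil >> 6) & 3];
}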
    1318 /** @} */
    1319 
    1320 /** @name Media (SSE/MMX/AVX) operation: Shift Immediate Stuff (evil)
    1321  * @{ */
    1322 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU64,(uint64_t *puDst, uint8_t bShift));
    1323 typedef FNIEMAIMPLMEDIAPSHIFTU64 *PFNIEMAIMPLMEDIAPSHIFTU64;
    1324 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU128,(PRTUINT128U puDst, uint8_t bShift));
    1325 typedef FNIEMAIMPLMEDIAPSHIFTU128 *PFNIEMAIMPLMEDIAPSHIFTU128;
    1326 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAPSHIFTU256,(PRTUINT256U puDst, uint8_t bShift));
    1327 typedef FNIEMAIMPLMEDIAPSHIFTU256 *PFNIEMAIMPLMEDIAPSHIFTU256;
    1328 FNIEMAIMPLMEDIAPSHIFTU64  iemAImpl_psllw_imm_u64,  iemAImpl_pslld_imm_u64,  iemAImpl_psllq_imm_u64;
    1329 FNIEMAIMPLMEDIAPSHIFTU64  iemAImpl_psrlw_imm_u64,  iemAImpl_psrld_imm_u64,  iemAImpl_psrlq_imm_u64;
    1330 FNIEMAIMPLMEDIAPSHIFTU64  iemAImpl_psraw_imm_u64,  iemAImpl_psrad_imm_u64;
    1331 FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psllw_imm_u128, iemAImpl_pslld_imm_u128, iemAImpl_psllq_imm_u128;
    1332 FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psrlw_imm_u128, iemAImpl_psrld_imm_u128, iemAImpl_psrlq_imm_u128;
    1333 FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_psraw_imm_u128, iemAImpl_psrad_imm_u128;
    1334 FNIEMAIMPLMEDIAPSHIFTU128 iemAImpl_pslldq_imm_u128, iemAImpl_psrldq_imm_u128;
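/* Editorial sketch, not part of the changeset: the immediate shifts work on
   each lane independently and produce all zeros when the count exceeds the
   lane width.  E.g. for psllw on the 64-bit (MMX) form, which operates
   in place per the FNIEMAIMPLMEDIAPSHIFTU64 typedef above: */
static void sketchPsllwImmU64(uint64_t *puDst, uint8_t bShift)
{
    uint64_t const uSrc    = *puDst;
    uint64_t       uResult = 0;
    if (bShift <= 15)                    /* a count above 15 zeroes every word */
        for (unsigned iWord = 0; iWord < 4; iWord++)
            uResult |= (uint64_t)(uint16_t)((uint16_t)(uSrc >> (iWord * 16)) << bShift) << (iWord * 16);
    *puDst = uResult;
}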
    1335 /** @} */
    1336 
    1337 /** @name Media (SSE/MMX/AVX) operation: Move Byte Mask
    1338  * @{ */
    1339 IEM_DECL_IMPL_DEF(void, iemAImpl_maskmovq_u64,(uint64_t *puMem, uint64_t const *puSrc, uint64_t const *puMsk));
    1340 IEM_DECL_IMPL_DEF(void, iemAImpl_maskmovdqu_u128,(PRTUINT128U puMem, PCRTUINT128U puSrc, PCRTUINT128U puMsk));
    1341 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovmskb_u64,(uint64_t *pu64Dst, uint64_t const *puSrc));
    1342 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovmskb_u128,(uint64_t *pu64Dst, PCRTUINT128U puSrc));
    1343 #ifndef IEM_WITHOUT_ASSEMBLY
    1344 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovmskb_u256,(uint64_t *pu64Dst, PCRTUINT256U puSrc));
    1345 #endif
    1346 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovmskb_u256_fallback,(uint64_t *pu64Dst, PCRTUINT256U puSrc));
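/* Editorial sketch, not part of the changeset: pmovmskb gathers the most
   significant bit of every source byte into the low bits of the destination,
   zero extending the rest.  For the 64-bit (MMX) variant declared above: */
static void sketchPmovmskbU64(uint64_t *pu64Dst, uint64_t const *puSrc)
{
    uint64_t const uSrc = *puSrc;
    uint64_t       fMsk = 0;
    for (unsigned iByte = 0; iByte < 8; iByte++)
        fMsk |= ((uSrc >> (iByte * 8 + 7)) & 1) << iByte;
    *pu64Dst = fMsk;
}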
    1347 /** @} */
    1348 
    1349 /** @name Media (SSE/MMX/AVX) operations: Variable Blend Packed Bytes/R32/R64.
    1350  * @{ */
    1351 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBLENDU128,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puMask));
    1352 typedef FNIEMAIMPLBLENDU128  *PFNIEMAIMPLBLENDU128;
    1353 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLAVXBLENDU128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, PCRTUINT128U puMask));
    1354 typedef FNIEMAIMPLAVXBLENDU128  *PFNIEMAIMPLAVXBLENDU128;
    1355 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLAVXBLENDU256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, PCRTUINT256U puMask));
    1356 typedef FNIEMAIMPLAVXBLENDU256  *PFNIEMAIMPLAVXBLENDU256;
    1357 
    1358 FNIEMAIMPLBLENDU128 iemAImpl_pblendvb_u128;
    1359 FNIEMAIMPLBLENDU128 iemAImpl_pblendvb_u128_fallback;
    1360 FNIEMAIMPLAVXBLENDU128 iemAImpl_vpblendvb_u128;
    1361 FNIEMAIMPLAVXBLENDU128 iemAImpl_vpblendvb_u128_fallback;
    1362 FNIEMAIMPLAVXBLENDU256 iemAImpl_vpblendvb_u256;
    1363 FNIEMAIMPLAVXBLENDU256 iemAImpl_vpblendvb_u256_fallback;
    1364 
    1365 FNIEMAIMPLBLENDU128 iemAImpl_blendvps_u128;
    1366 FNIEMAIMPLBLENDU128 iemAImpl_blendvps_u128_fallback;
    1367 FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvps_u128;
    1368 FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvps_u128_fallback;
    1369 FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvps_u256;
    1370 FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvps_u256_fallback;
    1371 
    1372 FNIEMAIMPLBLENDU128 iemAImpl_blendvpd_u128;
    1373 FNIEMAIMPLBLENDU128 iemAImpl_blendvpd_u128_fallback;
    1374 FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvpd_u128;
    1375 FNIEMAIMPLAVXBLENDU128 iemAImpl_vblendvpd_u128_fallback;
    1376 FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvpd_u256;
    1377 FNIEMAIMPLAVXBLENDU256 iemAImpl_vblendvpd_u256_fallback;
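/* Editorial sketch, not part of the changeset: in the variable blends the
   mask operand contributes one select bit per element (its MSB), so pblendvb
   reduces to a per-byte conditional move, along these lines: */
static void sketchPblendvbU128(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puMask)
{
    for (unsigned iByte = 0; iByte < 16; iByte++)
        if (puMask->au8[iByte] & 0x80)   /* mask byte MSB selects the source */
            puDst->au8[iByte] = puSrc->au8[iByte];
}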
    1378 /** @} */
    1379 
    1380 
    1381 /** @name Media (SSE/MMX/AVX) operation: Sort this later
    1382  * @{ */
    1383 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1384 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1385 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
    1386 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1387 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbw_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1388 
    1389 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
    1390 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
    1391 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
    1392 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1393 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1394 
    1395 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
    1396 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
    1397 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u128_fallback,(PRTUINT128U puDst, uint16_t uSrc));
    1398 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1399 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxbq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1400 
    1401 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1402 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1403 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
    1404 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1405 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1406 
    1407 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
    1408 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
    1409 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
    1410 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1411 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxwq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1412 
    1413 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovsxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1414 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1415 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
    1416 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1417 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovsxdq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1418 
    1419 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1420 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1421 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
    1422 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1423 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbw_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1424 
    1425 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
    1426 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u128,(PRTUINT128U puDst, uint32_t uSrc));
    1427 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
    1428 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1429 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1430 
    1431 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
    1432 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u128,(PRTUINT128U puDst, uint16_t uSrc));
    1433 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u128_fallback,(PRTUINT128U puDst, uint16_t uSrc));
    1434 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1435 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxbq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1436 
    1437 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1438 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1439 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
    1440 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1441 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwd_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1442 
    1443 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
    1444 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u128,(PRTUINT128U puDst, uint32_t uSrc));
    1445 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u128_fallback,(PRTUINT128U puDst, uint32_t uSrc));
    1446 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1447 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxwq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1448 
    1449 IEM_DECL_IMPL_DEF(void, iemAImpl_pmovzxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1450 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u128,(PRTUINT128U puDst, uint64_t uSrc));
    1451 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u128_fallback,(PRTUINT128U puDst, uint64_t uSrc));
    1452 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u256,(PRTUINT256U puDst, PCRTUINT128U puSrc));
    1453 IEM_DECL_IMPL_DEF(void, iemAImpl_vpmovzxdq_u256_fallback,(PRTUINT256U puDst, PCRTUINT128U puSrc));
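/* Editorial sketch, not part of the changeset: the pmovsx/pmovzx family just
   widens packed elements, which is why the narrower sources arrive above as
   plain integers.  E.g. byte-to-word sign extension (name hypothetical): */
static void sketchPmovsxbwU128(PRTUINT128U puDst, uint64_t uSrc)
{
    for (unsigned iWord = 0; iWord < 8; iWord++)   /* 8 bytes in, 8 sign-extended words out */
        puDst->au16[iWord] = (uint16_t)(int16_t)(int8_t)(uSrc >> (iWord * 8));
}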
    1454 
    1455 IEM_DECL_IMPL_DEF(void, iemAImpl_shufpd_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
    1456 IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
    1457 IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
    1458 IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
    1459 IEM_DECL_IMPL_DEF(void, iemAImpl_vshufpd_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
    1460 
    1461 IEM_DECL_IMPL_DEF(void, iemAImpl_shufps_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
    1462 IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
    1463 IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
    1464 IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
    1465 IEM_DECL_IMPL_DEF(void, iemAImpl_vshufps_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
    1466 
    1467 IEM_DECL_IMPL_DEF(void, iemAImpl_palignr_u64,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t bEvil));
    1468 IEM_DECL_IMPL_DEF(void, iemAImpl_palignr_u64_fallback,(uint64_t *pu64Dst, uint64_t u64Src, uint8_t bEvil));
    1469 
    1470 IEM_DECL_IMPL_DEF(void, iemAImpl_movmskps_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
    1471 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
    1472 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u128_fallback,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
    1473 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u256,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
    1474 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskps_u256_fallback,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
    1475 
    1476 IEM_DECL_IMPL_DEF(void, iemAImpl_movmskpd_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
    1477 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u128,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
    1478 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u128_fallback,(uint8_t *pu8Dst, PCRTUINT128U puSrc));
    1479 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u256,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
    1480 IEM_DECL_IMPL_DEF(void, iemAImpl_vmovmskpd_u256_fallback,(uint8_t *pu8Dst, PCRTUINT256U puSrc));
    1481 
    1482 
    1483 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U128IMM8,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t bEvil));
    1484 typedef FNIEMAIMPLMEDIAOPTF2U128IMM8 *PFNIEMAIMPLMEDIAOPTF2U128IMM8;
    1485 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF2U256IMM8,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t bEvil));
    1486 typedef FNIEMAIMPLMEDIAOPTF2U256IMM8 *PFNIEMAIMPLMEDIAOPTF2U256IMM8;
    1487 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U128IMM8,(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint8_t bEvil));
    1488 typedef FNIEMAIMPLMEDIAOPTF3U128IMM8 *PFNIEMAIMPLMEDIAOPTF3U128IMM8;
    1489 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLMEDIAOPTF3U256IMM8,(PRTUINT256U puDst, PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint8_t bEvil));
    1490 typedef FNIEMAIMPLMEDIAOPTF3U256IMM8 *PFNIEMAIMPLMEDIAOPTF3U256IMM8;
    1491 
    1492 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_palignr_u128, iemAImpl_palignr_u128_fallback;
    1493 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_pblendw_u128, iemAImpl_pblendw_u128_fallback;
    1494 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_blendps_u128, iemAImpl_blendps_u128_fallback;
    1495 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_blendpd_u128, iemAImpl_blendpd_u128_fallback;
    1496 
    1497 FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpalignr_u128, iemAImpl_vpalignr_u128_fallback;
    1498 FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpblendw_u128, iemAImpl_vpblendw_u128_fallback;
    1499 FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpblendd_u128, iemAImpl_vpblendd_u128_fallback;
    1500 FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vblendps_u128, iemAImpl_vblendps_u128_fallback;
    1501 FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vblendpd_u128, iemAImpl_vblendpd_u128_fallback;
    1502 
    1503 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpalignr_u256, iemAImpl_vpalignr_u256_fallback;
    1504 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpblendw_u256, iemAImpl_vpblendw_u256_fallback;
    1505 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vpblendd_u256, iemAImpl_vpblendd_u256_fallback;
    1506 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vblendps_u256, iemAImpl_vblendps_u256_fallback;
    1507 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vblendpd_u256, iemAImpl_vblendpd_u256_fallback;
    1508 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vperm2i128_u256, iemAImpl_vperm2i128_u256_fallback;
    1509 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vperm2f128_u256, iemAImpl_vperm2f128_u256_fallback;
    1510 
    1511 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesimc_u128,     iemAImpl_aesimc_u128_fallback;
    1512 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesenc_u128,     iemAImpl_aesenc_u128_fallback;
    1513 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesenclast_u128, iemAImpl_aesenclast_u128_fallback;
    1514 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesdec_u128,     iemAImpl_aesdec_u128_fallback;
    1515 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_aesdeclast_u128, iemAImpl_aesdeclast_u128_fallback;
    1516 
    1517 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_vaesimc_u128,     iemAImpl_vaesimc_u128_fallback;
    1518 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesenc_u128,     iemAImpl_vaesenc_u128_fallback;
    1519 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesenclast_u128, iemAImpl_vaesenclast_u128_fallback;
    1520 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesdec_u128,     iemAImpl_vaesdec_u128_fallback;
    1521 FNIEMAIMPLMEDIAOPTF3U128 iemAImpl_vaesdeclast_u128, iemAImpl_vaesdeclast_u128_fallback;
    1522 
    1523 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_aeskeygenassist_u128, iemAImpl_aeskeygenassist_u128_fallback;
    1524 
    1525 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vaeskeygenassist_u128, iemAImpl_vaeskeygenassist_u128_fallback;
    1526 
    1527 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1nexte_u128,       iemAImpl_sha1nexte_u128_fallback;
    1528 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1msg1_u128,        iemAImpl_sha1msg1_u128_fallback;
    1529 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha1msg2_u128,        iemAImpl_sha1msg2_u128_fallback;
    1530 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha256msg1_u128,      iemAImpl_sha256msg1_u128_fallback;
    1531 FNIEMAIMPLMEDIAOPTF2U128 iemAImpl_sha256msg2_u128,      iemAImpl_sha256msg2_u128_fallback;
    1532 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_sha1rnds4_u128,   iemAImpl_sha1rnds4_u128_fallback;
    1533 IEM_DECL_IMPL_DEF(void, iemAImpl_sha256rnds2_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puXmm0Constants));
    1534 IEM_DECL_IMPL_DEF(void, iemAImpl_sha256rnds2_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, PCRTUINT128U puXmm0Constants));
    1535 
    1536 FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermq_u256,      iemAImpl_vpermq_u256_fallback;
    1537 FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermpd_u256,     iemAImpl_vpermpd_u256_fallback;
    1538 
    1539 typedef struct IEMPCMPISTRXSRC
    1540 {
    1541     RTUINT128U              uSrc1;
    1542     RTUINT128U              uSrc2;
    1543 } IEMPCMPISTRXSRC;
    1544 typedef IEMPCMPISTRXSRC *PIEMPCMPISTRXSRC;
    1545 typedef const IEMPCMPISTRXSRC *PCIEMPCMPISTRXSRC;
    1546 
    1547 typedef struct IEMPCMPESTRXSRC
    1548 {
    1549     RTUINT128U              uSrc1;
    1550     RTUINT128U              uSrc2;
    1551     uint64_t                u64Rax;
    1552     uint64_t                u64Rdx;
    1553 } IEMPCMPESTRXSRC;
    1554 typedef IEMPCMPESTRXSRC *PIEMPCMPESTRXSRC;
    1555 typedef const IEMPCMPESTRXSRC *PCIEMPCMPESTRXSRC;
    1556 
    1557 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLPCMPISTRIU128IMM8,(uint32_t *pEFlags, PCRTUINT128U pSrc1, PCRTUINT128U pSrc2, uint8_t bEvil));
    1558 typedef FNIEMAIMPLPCMPISTRIU128IMM8 *PFNIEMAIMPLPCMPISTRIU128IMM8;
    1559 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPESTRIU128IMM8,(uint32_t *pu32Ecx, uint32_t *pEFlags, PCIEMPCMPESTRXSRC pSrc, uint8_t bEvil));
    1560 typedef FNIEMAIMPLPCMPESTRIU128IMM8 *PFNIEMAIMPLPCMPESTRIU128IMM8;
    1561 
    1562 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPISTRMU128IMM8,(PRTUINT128U puDst, uint32_t *pEFlags, PCIEMPCMPISTRXSRC pSrc, uint8_t bEvil));
    1563 typedef FNIEMAIMPLPCMPISTRMU128IMM8 *PFNIEMAIMPLPCMPISTRMU128IMM8;
    1564 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLPCMPESTRMU128IMM8,(PRTUINT128U puDst, uint32_t *pEFlags, PCIEMPCMPESTRXSRC pSrc, uint8_t bEvil));
    1565 typedef FNIEMAIMPLPCMPESTRMU128IMM8 *PFNIEMAIMPLPCMPESTRMU128IMM8;
    1566 
    1567 FNIEMAIMPLPCMPISTRIU128IMM8 iemAImpl_pcmpistri_u128,  iemAImpl_pcmpistri_u128_fallback;
    1568 FNIEMAIMPLPCMPESTRIU128IMM8 iemAImpl_pcmpestri_u128,  iemAImpl_pcmpestri_u128_fallback;
    1569 FNIEMAIMPLPCMPISTRMU128IMM8 iemAImpl_pcmpistrm_u128,  iemAImpl_pcmpistrm_u128_fallback;
    1570 FNIEMAIMPLPCMPESTRMU128IMM8 iemAImpl_pcmpestrm_u128,  iemAImpl_pcmpestrm_u128_fallback;
    1571 FNIEMAIMPLPCMPISTRIU128IMM8 iemAImpl_vpcmpistri_u128, iemAImpl_vpcmpistri_u128_fallback;
    1572 FNIEMAIMPLPCMPESTRIU128IMM8 iemAImpl_vpcmpestri_u128, iemAImpl_vpcmpestri_u128_fallback;
    1573 FNIEMAIMPLPCMPISTRMU128IMM8 iemAImpl_vpcmpistrm_u128, iemAImpl_vpcmpistrm_u128_fallback;
    1574 FNIEMAIMPLPCMPESTRMU128IMM8 iemAImpl_vpcmpestrm_u128, iemAImpl_vpcmpestrm_u128_fallback;
    1575 
    1576 
    1577 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_pclmulqdq_u128, iemAImpl_pclmulqdq_u128_fallback;
    1578 FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vpclmulqdq_u128, iemAImpl_vpclmulqdq_u128_fallback;
    1579 
    1580 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_mpsadbw_u128, iemAImpl_mpsadbw_u128_fallback;
    1581 FNIEMAIMPLMEDIAOPTF3U128IMM8 iemAImpl_vmpsadbw_u128, iemAImpl_vmpsadbw_u128_fallback;
    1582 FNIEMAIMPLMEDIAOPTF3U256IMM8 iemAImpl_vmpsadbw_u256, iemAImpl_vmpsadbw_u256_fallback;
    1583 
    1584 FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsllw_imm_u128, iemAImpl_vpsllw_imm_u128_fallback;
    1585 FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsllw_imm_u256, iemAImpl_vpsllw_imm_u256_fallback;
    1586 FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpslld_imm_u128, iemAImpl_vpslld_imm_u128_fallback;
    1587 FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpslld_imm_u256, iemAImpl_vpslld_imm_u256_fallback;
    1588 FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsllq_imm_u128, iemAImpl_vpsllq_imm_u128_fallback;
    1589 FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsllq_imm_u256, iemAImpl_vpsllq_imm_u256_fallback;
    1590 IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
    1591 IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
    1592 IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
    1593 IEM_DECL_IMPL_DEF(void, iemAImpl_vpslldq_imm_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
    1594 
    1595 FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsraw_imm_u128, iemAImpl_vpsraw_imm_u128_fallback;
    1596 FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsraw_imm_u256, iemAImpl_vpsraw_imm_u256_fallback;
    1597 FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrad_imm_u128, iemAImpl_vpsrad_imm_u128_fallback;
    1598 FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrad_imm_u256, iemAImpl_vpsrad_imm_u256_fallback;
    1599 
    1600 FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrlw_imm_u128, iemAImpl_vpsrlw_imm_u128_fallback;
    1601 FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrlw_imm_u256, iemAImpl_vpsrlw_imm_u256_fallback;
    1602 FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrld_imm_u128, iemAImpl_vpsrld_imm_u128_fallback;
    1603 FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrld_imm_u256, iemAImpl_vpsrld_imm_u256_fallback;
    1604 FNIEMAIMPLMEDIAPSHUFU128 iemAImpl_vpsrlq_imm_u128, iemAImpl_vpsrlq_imm_u128_fallback;
    1605 FNIEMAIMPLMEDIAPSHUFU256 iemAImpl_vpsrlq_imm_u256, iemAImpl_vpsrlq_imm_u256_fallback;
    1606 IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u128,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
    1607 IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u128_fallback,(PRTUINT128U puDst, PCRTUINT128U puSrc, uint8_t uShift));
    1608 IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u256,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
    1609 IEM_DECL_IMPL_DEF(void, iemAImpl_vpsrldq_imm_u256_fallback,(PRTUINT256U puDst, PCRTUINT256U puSrc, uint8_t uShift));
    1610 
    1611 FNIEMAIMPLMEDIAOPTF3U128     iemAImpl_vpermilps_u128,     iemAImpl_vpermilps_u128_fallback;
    1612 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vpermilps_imm_u128, iemAImpl_vpermilps_imm_u128_fallback;
    1613 FNIEMAIMPLMEDIAOPTF3U256     iemAImpl_vpermilps_u256,     iemAImpl_vpermilps_u256_fallback;
    1614 FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermilps_imm_u256, iemAImpl_vpermilps_imm_u256_fallback;
    1615 
    1616 FNIEMAIMPLMEDIAOPTF3U128     iemAImpl_vpermilpd_u128,     iemAImpl_vpermilpd_u128_fallback;
    1617 FNIEMAIMPLMEDIAOPTF2U128IMM8 iemAImpl_vpermilpd_imm_u128, iemAImpl_vpermilpd_imm_u128_fallback;
    1618 FNIEMAIMPLMEDIAOPTF3U256     iemAImpl_vpermilpd_u256,     iemAImpl_vpermilpd_u256_fallback;
    1619 FNIEMAIMPLMEDIAOPTF2U256IMM8 iemAImpl_vpermilpd_imm_u256, iemAImpl_vpermilpd_imm_u256_fallback;
    1620 
    1621 FNIEMAIMPLMEDIAOPTF3U128     iemAImpl_vpsllvd_u128, iemAImpl_vpsllvd_u128_fallback;
    1622 FNIEMAIMPLMEDIAOPTF3U256     iemAImpl_vpsllvd_u256, iemAImpl_vpsllvd_u256_fallback;
    1623 FNIEMAIMPLMEDIAOPTF3U128     iemAImpl_vpsllvq_u128, iemAImpl_vpsllvq_u128_fallback;
    1624 FNIEMAIMPLMEDIAOPTF3U256     iemAImpl_vpsllvq_u256, iemAImpl_vpsllvq_u256_fallback;
    1625 FNIEMAIMPLMEDIAOPTF3U128     iemAImpl_vpsravd_u128, iemAImpl_vpsravd_u128_fallback;
    1626 FNIEMAIMPLMEDIAOPTF3U256     iemAImpl_vpsravd_u256, iemAImpl_vpsravd_u256_fallback;
    1627 FNIEMAIMPLMEDIAOPTF3U128     iemAImpl_vpsrlvd_u128, iemAImpl_vpsrlvd_u128_fallback;
    1628 FNIEMAIMPLMEDIAOPTF3U256     iemAImpl_vpsrlvd_u256, iemAImpl_vpsrlvd_u256_fallback;
    1629 FNIEMAIMPLMEDIAOPTF3U128     iemAImpl_vpsrlvq_u128, iemAImpl_vpsrlvq_u128_fallback;
    1630 FNIEMAIMPLMEDIAOPTF3U256     iemAImpl_vpsrlvq_u256, iemAImpl_vpsrlvq_u256_fallback;
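/* Editorial sketch, not part of the changeset: the variable shifts take one
   count per element from the second source and zero the element when the
   count is out of range.  Assuming the usual (puDst, puSrc1, puSrc2) layout
   of these three-operand worker types, vpsllvd on 128 bits would be: */
static void sketchVpsllvdU128(PRTUINT128U puDst, PCRTUINT128U puSrc1, PCRTUINT128U puSrc2)
{
    RTUINT128U const uSrc1 = *puSrc1;    /* copy first, puDst may alias a source */
    RTUINT128U const uSrc2 = *puSrc2;
    for (unsigned i = 0; i < 4; i++)
        puDst->au32[i] = uSrc2.au32[i] <= 31 ? uSrc1.au32[i] << uSrc2.au32[i] : 0;
}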
    1631 /** @} */
    1632 
    1633 /** @name Media Odds and Ends
    1634  * @{ */
    1635 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U8,(uint32_t *puDst, uint8_t uSrc));
    1636 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U16,(uint32_t *puDst, uint16_t uSrc));
    1637 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U32,(uint32_t *puDst, uint32_t uSrc));
    1638 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U64,(uint32_t *puDst, uint64_t uSrc));
    1639 FNIEMAIMPLCR32U8  iemAImpl_crc32_u8,  iemAImpl_crc32_u8_fallback;
    1640 FNIEMAIMPLCR32U16 iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback;
    1641 FNIEMAIMPLCR32U32 iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback;
    1642 FNIEMAIMPLCR32U64 iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback;
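/* Editorial sketch, not part of the changeset: the CRC32 instruction uses the
   CRC-32C (Castagnoli) polynomial, bit-reflected 0x82f63b78, with no final
   inversion.  A bit-at-a-time fallback for the byte form would be: */
static void sketchCrc32U8(uint32_t *puDst, uint8_t uSrc)
{
    uint32_t uCrc = *puDst ^ uSrc;
    for (unsigned iBit = 0; iBit < 8; iBit++)
        uCrc = (uCrc >> 1) ^ (0x82f63b78U & (0U - (uCrc & 1)));
    *puDst = uCrc;
}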
    1643 
    1644 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLF2EFL128,(PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint32_t *pEFlags));
    1645 typedef FNIEMAIMPLF2EFL128 *PFNIEMAIMPLF2EFL128;
    1646 typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLF2EFL256,(PCRTUINT256U puSrc1, PCRTUINT256U puSrc2, uint32_t *pEFlags));
    1647 typedef FNIEMAIMPLF2EFL256 *PFNIEMAIMPLF2EFL256;
    1648 FNIEMAIMPLF2EFL128 iemAImpl_ptest_u128;
    1649 FNIEMAIMPLF2EFL256 iemAImpl_vptest_u256, iemAImpl_vptest_u256_fallback;
    1650 FNIEMAIMPLF2EFL128 iemAImpl_vtestps_u128, iemAImpl_vtestps_u128_fallback;
    1651 FNIEMAIMPLF2EFL256 iemAImpl_vtestps_u256, iemAImpl_vtestps_u256_fallback;
    1652 FNIEMAIMPLF2EFL128 iemAImpl_vtestpd_u128, iemAImpl_vtestpd_u128_fallback;
    1653 FNIEMAIMPLF2EFL256 iemAImpl_vtestpd_u256, iemAImpl_vtestpd_u256_fallback;
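/* Editorial sketch, not part of the changeset: ptest produces only flags --
   ZF when (src1 & src2) is all zero, CF when (~src1 & src2) is all zero, and
   the remaining arithmetic flags cleared: */
static void sketchPtestU128(PCRTUINT128U puSrc1, PCRTUINT128U puSrc2, uint32_t *pEFlags)
{
    uint32_t fEfl = *pEFlags & ~(uint32_t)(X86_EFL_ZF | X86_EFL_CF | X86_EFL_AF | X86_EFL_OF | X86_EFL_PF | X86_EFL_SF);
    if (!((puSrc1->au64[0] & puSrc2->au64[0]) | (puSrc1->au64[1] & puSrc2->au64[1])))
        fEfl |= X86_EFL_ZF;
    if (!((~puSrc1->au64[0] & puSrc2->au64[0]) | (~puSrc1->au64[1] & puSrc2->au64[1])))
        fEfl |= X86_EFL_CF;
    *pEFlags = fEfl;
}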
    1654 
    1655 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32U64,(uint32_t uMxCsrIn, int32_t *pi32Dst, const uint64_t *pu64Src)); /* pu64Src points to a double precision floating point value. */
    1656 typedef FNIEMAIMPLSSEF2I32U64 *PFNIEMAIMPLSSEF2I32U64;
    1657 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64U64,(uint32_t uMxCsrIn, int64_t *pi64Dst, const uint64_t *pu64Src)); /* pu64Src points to a double precision floating point value. */
    1658 typedef FNIEMAIMPLSSEF2I64U64 *PFNIEMAIMPLSSEF2I64U64;
    1659 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32U32,(uint32_t uMxCsrIn, int32_t *pi32Dst, const uint32_t *pu32Src)); /* pu32Src points to a single precision floating point value. */
    1660 typedef FNIEMAIMPLSSEF2I32U32 *PFNIEMAIMPLSSEF2I32U32;
    1661 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64U32,(uint32_t uMxCsrIn, int64_t *pi64Dst, const uint32_t *pu32Src)); /* pu32Src points to a single precision floating point value. */
    1662 typedef FNIEMAIMPLSSEF2I64U32 *PFNIEMAIMPLSSEF2I64U32;
    1663 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32R32,(uint32_t uMxCsrIn, int32_t *pi32Dst, PCRTFLOAT32U pr32Src));
    1664 typedef FNIEMAIMPLSSEF2I32R32 *PFNIEMAIMPLSSEF2I32R32;
    1665 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64R32,(uint32_t uMxCsrIn, int64_t *pi64Dst, PCRTFLOAT32U pr32Src));
    1666 typedef FNIEMAIMPLSSEF2I64R32 *PFNIEMAIMPLSSEF2I64R32;
    1667 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I32R64,(uint32_t uMxCsrIn, int32_t *pi32Dst, PCRTFLOAT64U pr64Src));
    1668 typedef FNIEMAIMPLSSEF2I32R64 *PFNIEMAIMPLSSEF2I32R64;
    1669 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2I64R64,(uint32_t uMxCsrIn, int64_t *pi64Dst, PCRTFLOAT64U pr64Src));
    1670 typedef FNIEMAIMPLSSEF2I64R64 *PFNIEMAIMPLSSEF2I64R64;
    1671 
    1672 FNIEMAIMPLSSEF2I32U64 iemAImpl_cvttsd2si_i32_r64;
    1673 FNIEMAIMPLSSEF2I32U64 iemAImpl_cvtsd2si_i32_r64;
    1674 
    1675 FNIEMAIMPLSSEF2I64U64 iemAImpl_cvttsd2si_i64_r64;
    1676 FNIEMAIMPLSSEF2I64U64 iemAImpl_cvtsd2si_i64_r64;
    1677 
    1678 FNIEMAIMPLSSEF2I32U32 iemAImpl_cvttss2si_i32_r32;
    1679 FNIEMAIMPLSSEF2I32U32 iemAImpl_cvtss2si_i32_r32;
    1680 
    1681 FNIEMAIMPLSSEF2I64U32 iemAImpl_cvttss2si_i64_r32;
    1682 FNIEMAIMPLSSEF2I64U32 iemAImpl_cvtss2si_i64_r32;
    1683 
    1684 FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvttss2si_i32_r32, iemAImpl_vcvttss2si_i32_r32_fallback;
    1685 FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvttss2si_i64_r32, iemAImpl_vcvttss2si_i64_r32_fallback;
    1686 FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvtss2si_i32_r32,  iemAImpl_vcvtss2si_i32_r32_fallback;
    1687 FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvtss2si_i64_r32,  iemAImpl_vcvtss2si_i64_r32_fallback;
    1688 
    1689 FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvttss2si_i32_r64, iemAImpl_vcvttss2si_i32_r64_fallback;
    1690 FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvttss2si_i64_r64, iemAImpl_vcvttss2si_i64_r64_fallback;
    1691 FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvtss2si_i32_r64,  iemAImpl_vcvtss2si_i32_r64_fallback;
    1692 FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvtss2si_i64_r64,  iemAImpl_vcvtss2si_i64_r64_fallback;
    1693 
    1694 FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvttsd2si_i32_r32, iemAImpl_vcvttsd2si_i32_r32_fallback;
    1695 FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvttsd2si_i64_r32, iemAImpl_vcvttsd2si_i64_r32_fallback;
    1696 FNIEMAIMPLSSEF2I32R32 iemAImpl_vcvtsd2si_i32_r32,  iemAImpl_vcvtsd2si_i32_r32_fallback;
    1697 FNIEMAIMPLSSEF2I64R32 iemAImpl_vcvtsd2si_i64_r32,  iemAImpl_vcvtsd2si_i64_r32_fallback;
    1698 
    1699 FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvttsd2si_i32_r64, iemAImpl_vcvttsd2si_i32_r64_fallback;
    1700 FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvttsd2si_i64_r64, iemAImpl_vcvttsd2si_i64_r64_fallback;
    1701 FNIEMAIMPLSSEF2I32R64 iemAImpl_vcvtsd2si_i32_r64,  iemAImpl_vcvtsd2si_i32_r64_fallback;
    1702 FNIEMAIMPLSSEF2I64R64 iemAImpl_vcvtsd2si_i64_r64,  iemAImpl_vcvtsd2si_i64_r64_fallback;
    1703 
    1704 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R32I32,(uint32_t uMxCsrIn, PRTFLOAT32U pr32Dst, const int32_t *pi32Src));
    1705 typedef FNIEMAIMPLSSEF2R32I32 *PFNIEMAIMPLSSEF2R32I32;
    1706 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R32I64,(uint32_t uMxCsrIn, PRTFLOAT32U pr32Dst, const int64_t *pi64Src));
    1707 typedef FNIEMAIMPLSSEF2R32I64 *PFNIEMAIMPLSSEF2R32I64;
    1708 
    1709 FNIEMAIMPLSSEF2R32I32 iemAImpl_cvtsi2ss_r32_i32;
    1710 FNIEMAIMPLSSEF2R32I64 iemAImpl_cvtsi2ss_r32_i64;
    1711 
    1712 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLAVXF3XMMI32,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, const int32_t *pi32Src));
    1713 typedef FNIEMAIMPLAVXF3XMMI32 *PFNIEMAIMPLAVXF3XMMI32;
    1714 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLAVXF3XMMI64,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, const int64_t *pi64Src));
    1715 typedef FNIEMAIMPLAVXF3XMMI64 *PFNIEMAIMPLAVXF3XMMI64;
    1716 
    1717 FNIEMAIMPLAVXF3XMMI32 iemAImpl_vcvtsi2ss_u128_i32, iemAImpl_vcvtsi2ss_u128_i32_fallback;
    1718 FNIEMAIMPLAVXF3XMMI64 iemAImpl_vcvtsi2ss_u128_i64, iemAImpl_vcvtsi2ss_u128_i64_fallback;
    1719 
    1720 
    1721 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R64I32,(uint32_t uMxCsrIn, PRTFLOAT64U pr64Dst, const int32_t *pi32Src));
    1722 typedef FNIEMAIMPLSSEF2R64I32 *PFNIEMAIMPLSSEF2R64I32;
    1723 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLSSEF2R64I64,(uint32_t uMxCsrIn, PRTFLOAT64U pr64Dst, const int64_t *pi64Src));
    1724 typedef FNIEMAIMPLSSEF2R64I64 *PFNIEMAIMPLSSEF2R64I64;
    1725 
    1726 FNIEMAIMPLSSEF2R64I32 iemAImpl_cvtsi2sd_r64_i32;
    1727 FNIEMAIMPLSSEF2R64I64 iemAImpl_cvtsi2sd_r64_i64;
    1728 
    1729 FNIEMAIMPLAVXF3XMMI32 iemAImpl_vcvtsi2sd_u128_i32, iemAImpl_vcvtsi2sd_u128_i32_fallback;
    1730 FNIEMAIMPLAVXF3XMMI64 iemAImpl_vcvtsi2sd_u128_i64, iemAImpl_vcvtsi2sd_u128_i64_fallback;
    1731 
    1732 IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u128_u64,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two single precision floating point values. */
    1733 IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u128_u64_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst,  const uint64_t *pu64Src)); /* Actually two single precision floating point values. */
    1734 IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u256_u128,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
    1735 IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtps2pd_u256_u128_fallback,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
    1736 
    1737 
    1738 IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u128_u64,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two signed 32-bit integer values. */
    1739 IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u128_u64_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, const uint64_t *pu64Src)); /* Actually two signed 32-bit integer values. */
    1740 IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u256_u128,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
    1741 IEM_DECL_IMPL_DEF(uint32_t, iemAImpl_vcvtdq2pd_u256_u128_fallback,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86XMMREG puSrc));
    1742 
    1743 
    1744 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLF2EFLMXCSRR32R32,(uint32_t uMxCsrIn, uint32_t *pfEFlags, RTFLOAT32U uSrc1, RTFLOAT32U uSrc2));
    1745 typedef FNIEMAIMPLF2EFLMXCSRR32R32 *PFNIEMAIMPLF2EFLMXCSRR32R32;
    1746 
    1747 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLF2EFLMXCSRR64R64,(uint32_t uMxCsrIn, uint32_t *pfEFlags, RTFLOAT64U uSrc1, RTFLOAT64U uSrc2));
    1748 typedef FNIEMAIMPLF2EFLMXCSRR64R64 *PFNIEMAIMPLF2EFLMXCSRR64R64;
    1749 
    1750 FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_ucomiss_u128;
    1751 FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_vucomiss_u128, iemAImpl_vucomiss_u128_fallback;
    1752 
    1753 FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_ucomisd_u128;
    1754 FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_vucomisd_u128, iemAImpl_vucomisd_u128_fallback;
    1755 
    1756 FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_comiss_u128;
    1757 FNIEMAIMPLF2EFLMXCSRR32R32 iemAImpl_vcomiss_u128, iemAImpl_vcomiss_u128_fallback;
    1758 
    1759 FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_comisd_u128;
    1760 FNIEMAIMPLF2EFLMXCSRR64R64 iemAImpl_vcomisd_u128, iemAImpl_vcomisd_u128_fallback;
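/* Editorial sketch, not part of the changeset: (u)comiss/(u)comisd map the
   compare result onto ZF/PF/CF (unordered -> 1,1,1; less -> 0,0,1;
   equal -> 1,0,0; greater -> 0,0,0) and clear AF/SF/OF.  A host-float sketch
   that ignores the MXCSR and exception details the real workers handle: */
static uint32_t sketchUcomissU128(uint32_t uMxCsrIn, uint32_t *pfEFlags, RTFLOAT32U uSrc1, RTFLOAT32U uSrc2)
{
    uint32_t fEfl = *pfEFlags & ~(uint32_t)(X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF | X86_EFL_AF | X86_EFL_SF | X86_EFL_OF);
    if (uSrc1.r != uSrc1.r || uSrc2.r != uSrc2.r)   /* NaN operand -> unordered */
        fEfl |= X86_EFL_ZF | X86_EFL_PF | X86_EFL_CF;
    else if (uSrc1.r < uSrc2.r)
        fEfl |= X86_EFL_CF;
    else if (uSrc1.r == uSrc2.r)
        fEfl |= X86_EFL_ZF;
    *pfEFlags = fEfl;
    return uMxCsrIn;                     /* real workers also report exceptions here */
}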
    1761 
    1762 
    1763 typedef struct IEMMEDIAF2XMMSRC
    1764 {
    1765     X86XMMREG               uSrc1;
    1766     X86XMMREG               uSrc2;
    1767 } IEMMEDIAF2XMMSRC;
    1768 typedef IEMMEDIAF2XMMSRC *PIEMMEDIAF2XMMSRC;
    1769 typedef const IEMMEDIAF2XMMSRC *PCIEMMEDIAF2XMMSRC;
    1770 
    1771 
    1772 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3XMMIMM8,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCIEMMEDIAF2XMMSRC puSrc, uint8_t bEvil));
    1773 typedef FNIEMAIMPLMEDIAF3XMMIMM8 *PFNIEMAIMPLMEDIAF3XMMIMM8;
    1774 
    1775 
    1776 typedef struct IEMMEDIAF2YMMSRC
    1777 {
    1778     X86YMMREG               uSrc1;
    1779     X86YMMREG               uSrc2;
    1780 } IEMMEDIAF2YMMSRC;
    1781 typedef IEMMEDIAF2YMMSRC *PIEMMEDIAF2YMMSRC;
    1782 typedef const IEMMEDIAF2YMMSRC *PCIEMMEDIAF2YMMSRC;
    1783 
    1784 
    1785 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF3YMMIMM8,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCIEMMEDIAF2YMMSRC puSrc, uint8_t bEvil));
    1786 typedef FNIEMAIMPLMEDIAF3YMMIMM8 *PFNIEMAIMPLMEDIAF3YMMIMM8;
    1787 
    1788 
    1789 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpps_u128;
    1790 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmppd_u128;
    1791 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpss_u128;
    1792 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_cmpsd_u128;
    1793 
    1794 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpps_u128, iemAImpl_vcmpps_u128_fallback;
    1795 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmppd_u128, iemAImpl_vcmppd_u128_fallback;
    1796 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpss_u128, iemAImpl_vcmpss_u128_fallback;
    1797 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_vcmpsd_u128, iemAImpl_vcmpsd_u128_fallback;
    1798 
    1799 FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vcmpps_u256, iemAImpl_vcmpps_u256_fallback;
    1800 FNIEMAIMPLMEDIAF3YMMIMM8 iemAImpl_vcmppd_u256, iemAImpl_vcmppd_u256_fallback;
    1801 
    1802 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_roundss_u128;
    1803 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_roundsd_u128;
    1804 
    1805 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_dpps_u128,     iemAImpl_dpps_u128_fallback;
    1806 FNIEMAIMPLMEDIAF3XMMIMM8 iemAImpl_dppd_u128,     iemAImpl_dppd_u128_fallback;
    1807 
    1808 
    1809 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U128IMM8,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc, uint8_t bEvil));
    1810 typedef FNIEMAIMPLMEDIAF2U128IMM8 *PFNIEMAIMPLMEDIAF2U128IMM8;
    1811 
    1812 
    1813 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMEDIAF2U256IMM8,(uint32_t uMxCsrIn, PX86YMMREG puDst, PCX86YMMREG puSrc, uint8_t bEvil));
    1814 typedef FNIEMAIMPLMEDIAF2U256IMM8 *PFNIEMAIMPLMEDIAF2U256IMM8;
    1815 
    1816 
    1817 FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_roundps_u128,  iemAImpl_roundps_u128_fallback;
    1818 FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_roundpd_u128,  iemAImpl_roundpd_u128_fallback;
    1819 
    1820 FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_vroundps_u128, iemAImpl_vroundps_u128_fallback;
    1821 FNIEMAIMPLMEDIAF2U128IMM8 iemAImpl_vroundpd_u128, iemAImpl_vroundpd_u128_fallback;
    1822 
    1823 FNIEMAIMPLMEDIAF2U256IMM8 iemAImpl_vroundps_u256, iemAImpl_vroundps_u256_fallback;
    1824 FNIEMAIMPLMEDIAF2U256IMM8 iemAImpl_vroundpd_u256, iemAImpl_vroundpd_u256_fallback;
    1825 
    1826 FNIEMAIMPLMEDIAF3XMMIMM8  iemAImpl_vroundss_u128, iemAImpl_vroundss_u128_fallback;
    1827 FNIEMAIMPLMEDIAF3XMMIMM8  iemAImpl_vroundsd_u128, iemAImpl_vroundsd_u128_fallback;
    1828 
    1829 FNIEMAIMPLMEDIAF3XMMIMM8  iemAImpl_vdpps_u128,     iemAImpl_vdpps_u128_fallback;
    1830 FNIEMAIMPLMEDIAF3XMMIMM8  iemAImpl_vdppd_u128,     iemAImpl_vdppd_u128_fallback;
    1831 
    1832 FNIEMAIMPLMEDIAF3YMMIMM8  iemAImpl_vdpps_u256,     iemAImpl_vdpps_u256_fallback;
    1833 
    1834 
    1835 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU64U128,(uint32_t fMxCsrIn, uint64_t *pu64Dst, PCX86XMMREG pSrc));
    1836 typedef FNIEMAIMPLMXCSRU64U128 *PFNIEMAIMPLMXCSRU64U128;
    1837 
    1838 FNIEMAIMPLMXCSRU64U128 iemAImpl_cvtpd2pi_u128;
    1839 FNIEMAIMPLMXCSRU64U128 iemAImpl_cvttpd2pi_u128;
    1840 
    1841 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU128U64,(uint32_t fMxCsrIn, PX86XMMREG pDst, uint64_t u64Src));
    1842 typedef FNIEMAIMPLMXCSRU128U64 *PFNIEMAIMPLMXCSRU128U64;
    1843 
    1844 FNIEMAIMPLMXCSRU128U64 iemAImpl_cvtpi2ps_u128;
    1845 FNIEMAIMPLMXCSRU128U64 iemAImpl_cvtpi2pd_u128;
    1846 
    1847 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLMXCSRU64U64,(uint32_t fMxCsrIn, uint64_t *pu64Dst, uint64_t u64Src));
    1848 typedef FNIEMAIMPLMXCSRU64U64 *PFNIEMAIMPLMXCSRU64U64;
    1849 
    1850 FNIEMAIMPLMXCSRU64U64 iemAImpl_cvtps2pi_u128;
    1851 FNIEMAIMPLMXCSRU64U64 iemAImpl_cvttps2pi_u128;
    1852 
    1853 /** @} */
    1854 
    1855 
    1856 /** @name Function tables.
    1857  * @{
    1858  */
    1859 
    1860 /**
    1861  * Function table for a binary operator providing implementation based on
    1862  * operand size.
    1863  */
    1864 typedef struct IEMOPBINSIZES
    1865 {
    1866     PFNIEMAIMPLBINU8  pfnNormalU8,    pfnLockedU8;
    1867     PFNIEMAIMPLBINU16 pfnNormalU16,   pfnLockedU16;
    1868     PFNIEMAIMPLBINU32 pfnNormalU32,   pfnLockedU32;
    1869     PFNIEMAIMPLBINU64 pfnNormalU64,   pfnLockedU64;
    1870 } IEMOPBINSIZES;
    1871 /** Pointer to a binary operator function table. */
    1872 typedef IEMOPBINSIZES const *PCIEMOPBINSIZES;
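
A minimal usage sketch, assuming a populated table: the decoder picks the worker matching the effective operand size and lock prefix.  The helper name below is hypothetical; the members are the ones declared above.

    /* Hypothetical helper: select the 32-bit worker from a binary-op table. */
    static PFNIEMAIMPLBINU32 iemSketchPickBinU32(PCIEMOPBINSIZES pTable, bool fLocked)
    {
        return fLocked ? pTable->pfnLockedU32 : pTable->pfnNormalU32;
    }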
    1873 
    1874 
    1875 /**
    1876  * Function table for a unary operator providing implementation based on
    1877  * operand size.
    1878  */
    1879 typedef struct IEMOPUNARYSIZES
    1880 {
    1881     PFNIEMAIMPLUNARYU8  pfnNormalU8,    pfnLockedU8;
    1882     PFNIEMAIMPLUNARYU16 pfnNormalU16,   pfnLockedU16;
    1883     PFNIEMAIMPLUNARYU32 pfnNormalU32,   pfnLockedU32;
    1884     PFNIEMAIMPLUNARYU64 pfnNormalU64,   pfnLockedU64;
    1885 } IEMOPUNARYSIZES;
    1886 /** Pointer to a unary operator function table. */
    1887 typedef IEMOPUNARYSIZES const *PCIEMOPUNARYSIZES;
    1888 
    1889 
    1890 /**
    1891  * Function table for a shift operator providing implementation based on
    1892  * operand size.
    1893  */
    1894 typedef struct IEMOPSHIFTSIZES
    1895 {
    1896     PFNIEMAIMPLSHIFTU8  pfnNormalU8;
    1897     PFNIEMAIMPLSHIFTU16 pfnNormalU16;
    1898     PFNIEMAIMPLSHIFTU32 pfnNormalU32;
    1899     PFNIEMAIMPLSHIFTU64 pfnNormalU64;
    1900 } IEMOPSHIFTSIZES;
    1901 /** Pointer to a shift operator function table. */
    1902 typedef IEMOPSHIFTSIZES const *PCIEMOPSHIFTSIZES;
    1903 
    1904 
    1905 /**
    1906  * Function table for a multiplication or division operation.
    1907  */
    1908 typedef struct IEMOPMULDIVSIZES
    1909 {
    1910     PFNIEMAIMPLMULDIVU8  pfnU8;
    1911     PFNIEMAIMPLMULDIVU16 pfnU16;
    1912     PFNIEMAIMPLMULDIVU32 pfnU32;
    1913     PFNIEMAIMPLMULDIVU64 pfnU64;
    1914 } IEMOPMULDIVSIZES;
    1915 /** Pointer to a multiplication or division operation function table. */
    1916 typedef IEMOPMULDIVSIZES const *PCIEMOPMULDIVSIZES;
    1917 
    1918 
    1919 /**
    1920  * Function table for a double precision shift operator providing implementation
    1921  * based on operand size.
    1922  */
    1923 typedef struct IEMOPSHIFTDBLSIZES
    1924 {
    1925     PFNIEMAIMPLSHIFTDBLU16 pfnNormalU16;
    1926     PFNIEMAIMPLSHIFTDBLU32 pfnNormalU32;
    1927     PFNIEMAIMPLSHIFTDBLU64 pfnNormalU64;
    1928 } IEMOPSHIFTDBLSIZES;
    1929 /** Pointer to a double precision shift function table. */
    1930 typedef IEMOPSHIFTDBLSIZES const *PCIEMOPSHIFTDBLSIZES;
    1931 
    1932 
    1933 /**
    1934  * Function table for a media instruction taking two full sized media source
    1935  * registers and one full sized destination register (AVX).
    1936  */
    1937 typedef struct IEMOPMEDIAF3
    1938 {
    1939     PFNIEMAIMPLMEDIAF3U128 pfnU128;
    1940     PFNIEMAIMPLMEDIAF3U256 pfnU256;
    1941 } IEMOPMEDIAF3;
    1942 /** Pointer to a media operation function table for 3 full sized ops (AVX). */
    1943 typedef IEMOPMEDIAF3 const *PCIEMOPMEDIAF3;
    1944 
    1945 /** @def IEMOPMEDIAF3_INIT_VARS_EX
    1946  * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
    1947  * given functions as initializers.  For use in AVX functions where a pair of
    1948  * functions are only used once and the function table need not be public. */
    1949 #ifndef TST_IEM_CHECK_MC
    1950 # if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    1951 #  define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    1952     static IEMOPMEDIAF3 const s_Host     = { a_pfnHostU128,     a_pfnHostU256 }; \
    1953     static IEMOPMEDIAF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    1954 # else
    1955 #  define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    1956     static IEMOPMEDIAF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    1957 # endif
    1958 #else
    1959 # define IEMOPMEDIAF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
    1960 #endif
    1961 /** @def IEMOPMEDIAF3_INIT_VARS
    1962  * Generate AVX function tables for the @a a_InstrNm instruction.
    1963  * @sa IEMOPMEDIAF3_INIT_VARS_EX */
    1964 #define IEMOPMEDIAF3_INIT_VARS(a_InstrNm) \
    1965     IEMOPMEDIAF3_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128),           RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
    1966                               RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback),  RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
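
For instance, IEMOPMEDIAF3_INIT_VARS(vaddps) expands (on x86/amd64 hosts built without IEM_WITHOUT_ASSEMBLY) to roughly:

    static IEMOPMEDIAF3 const s_Host     = { iemAImpl_vaddps_u128,          iemAImpl_vaddps_u256 };
    static IEMOPMEDIAF3 const s_Fallback = { iemAImpl_vaddps_u128_fallback, iemAImpl_vaddps_u256_fallback };

with the vaddps worker declarations appearing further down in this header.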
    1967 
    1968 
    1969 /**
    1970  * Function table for a media instruction taking one full sized media source
    1971  * register and one full sized destination register (AVX).
    1972  */
    1973 typedef struct IEMOPMEDIAF2
    1974 {
    1975     PFNIEMAIMPLMEDIAF2U128 pfnU128;
    1976     PFNIEMAIMPLMEDIAF2U256 pfnU256;
    1977 } IEMOPMEDIAF2;
    1978 /** Pointer to a media operation function table for 2 full sized ops (AVX). */
    1979 typedef IEMOPMEDIAF2 const *PCIEMOPMEDIAF2;
    1980 
    1981 /** @def IEMOPMEDIAF2_INIT_VARS_EX
    1982  * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
    1983  * given functions as initializers.  For use in AVX functions where a pair of
    1984  * functions are only used once and the function table need not be public. */
    1985 #ifndef TST_IEM_CHECK_MC
    1986 # if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    1987 #  define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    1988     static IEMOPMEDIAF2 const s_Host     = { a_pfnHostU128,     a_pfnHostU256 }; \
    1989     static IEMOPMEDIAF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    1990 # else
    1991 #  define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    1992     static IEMOPMEDIAF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    1993 # endif
    1994 #else
    1995 # define IEMOPMEDIAF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
    1996 #endif
    1997 /** @def IEMOPMEDIAF2_INIT_VARS
    1998  * Generate AVX function tables for the @a a_InstrNm instruction.
    1999  * @sa IEMOPMEDIAF2_INIT_VARS_EX */
    2000 #define IEMOPMEDIAF2_INIT_VARS(a_InstrNm) \
    2001     IEMOPMEDIAF2_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128),           RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
    2002                               RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback),  RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
    2003 
    2004 
    2005 /**
    2006  * Function table for a media instruction taking two full sized media source
    2007  * registers and one full sized destination register, but no additional state
    2008  * (AVX).
    2009  */
    2010 typedef struct IEMOPMEDIAOPTF3
    2011 {
    2012     PFNIEMAIMPLMEDIAOPTF3U128 pfnU128;
    2013     PFNIEMAIMPLMEDIAOPTF3U256 pfnU256;
    2014 } IEMOPMEDIAOPTF3;
    2015 /** Pointer to a media operation function table for 3 full sized ops (AVX). */
    2016 typedef IEMOPMEDIAOPTF3 const *PCIEMOPMEDIAOPTF3;
    2017 
    2018 /** @def IEMOPMEDIAOPTF3_INIT_VARS_EX
    2019  * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
    2020  * given functions as initializers.  For use in AVX functions where a pair of
    2021  * functions are only used once and the function table need not be public. */
    2022 #ifndef TST_IEM_CHECK_MC
    2023 # if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    2024 #  define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2025     static IEMOPMEDIAOPTF3 const s_Host     = { a_pfnHostU128,     a_pfnHostU256 }; \
    2026     static IEMOPMEDIAOPTF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2027 # else
    2028 #  define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2029     static IEMOPMEDIAOPTF3 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2030 # endif
    2031 #else
    2032 # define IEMOPMEDIAOPTF3_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
    2033 #endif
    2034 /** @def IEMOPMEDIAOPTF3_INIT_VARS
    2035  * Generate AVX function tables for the @a a_InstrNm instruction.
    2036  * @sa IEMOPMEDIAOPTF3_INIT_VARS_EX */
    2037 #define IEMOPMEDIAOPTF3_INIT_VARS(a_InstrNm) \
    2038     IEMOPMEDIAOPTF3_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128),           RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
    2039                                  RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback),  RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
    2040 
    2041 /**
    2042  * Function table for a media instruction taking one full sized media source
    2043  * register and one full sized destination register, but no additional state
    2044  * (AVX).
    2045  */
    2046 typedef struct IEMOPMEDIAOPTF2
    2047 {
    2048     PFNIEMAIMPLMEDIAOPTF2U128 pfnU128;
    2049     PFNIEMAIMPLMEDIAOPTF2U256 pfnU256;
    2050 } IEMOPMEDIAOPTF2;
    2051 /** Pointer to a media operation function table for 2 full sized ops (AVX). */
    2052 typedef IEMOPMEDIAOPTF2 const *PCIEMOPMEDIAOPTF2;
    2053 
    2054 /** @def IEMOPMEDIAOPTF2_INIT_VARS_EX
    2055  * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
    2056  * given functions as initializers.  For use in AVX functions where a pair of
    2057  * functions are only used once and the function table need not be public. */
    2058 #ifndef TST_IEM_CHECK_MC
    2059 # if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    2060 #  define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2061     static IEMOPMEDIAOPTF2 const s_Host     = { a_pfnHostU128,     a_pfnHostU256 }; \
    2062     static IEMOPMEDIAOPTF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2063 # else
    2064 #  define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2065     static IEMOPMEDIAOPTF2 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2066 # endif
    2067 #else
    2068 # define IEMOPMEDIAOPTF2_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
    2069 #endif
    2070 /** @def IEMOPMEDIAOPTF2_INIT_VARS
    2071  * Generate AVX function tables for the @a a_InstrNm instruction.
    2072  * @sa IEMOPMEDIAOPTF2_INIT_VARS_EX */
    2073 #define IEMOPMEDIAOPTF2_INIT_VARS(a_InstrNm) \
    2074     IEMOPMEDIAOPTF2_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128),           RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
    2075                                  RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback),  RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
    2076 
    2077 
    2078 /**
    2079  * Function table for a media instruction taking one full sized media source
    2080  * register and one full sized destination register and an 8-bit immediate (AVX).
    2081  */
    2082 typedef struct IEMOPMEDIAF2IMM8
    2083 {
    2084     PFNIEMAIMPLMEDIAF2U128IMM8 pfnU128;
    2085     PFNIEMAIMPLMEDIAF2U256IMM8 pfnU256;
    2086 } IEMOPMEDIAF2IMM8;
    2087 /** Pointer to a media operation function table for 2 full sized ops (AVX). */
    2088 typedef IEMOPMEDIAF2IMM8 const *PCIEMOPMEDIAF2IMM8;
    2089 
    2090 /** @def IEMOPMEDIAF2IMM8_INIT_VARS_EX
    2091  * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
    2092  * given functions as initializers.  For use in AVX functions where a pair of
    2093  * functions are only used once and the function table need not be public. */
    2094 #ifndef TST_IEM_CHECK_MC
    2095 # if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    2096 #  define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2097     static IEMOPMEDIAF2IMM8 const s_Host     = { a_pfnHostU128,     a_pfnHostU256 }; \
    2098     static IEMOPMEDIAF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2099 # else
    2100 #  define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2101     static IEMOPMEDIAF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2102 # endif
    2103 #else
    2104 # define IEMOPMEDIAF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
    2105 #endif
    2106 /** @def IEMOPMEDIAF2IMM8_INIT_VARS
    2107  * Generate AVX function tables for the @a a_InstrNm instruction.
    2108  * @sa IEMOPMEDIAF2IMM8_INIT_VARS_EX */
    2109 #define IEMOPMEDIAF2IMM8_INIT_VARS(a_InstrNm) \
    2110     IEMOPMEDIAF2IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128),           RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
    2111                                   RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback),  RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
    2112 
    2113 
    2114 /**
    2115  * Function table for a media instruction taking one full sized media source
    2116  * register and one full sized destination register and an 8-bit immediate, but no additional state
    2117  * (AVX).
    2118  */
    2119 typedef struct IEMOPMEDIAOPTF2IMM8
    2120 {
    2121     PFNIEMAIMPLMEDIAOPTF2U128IMM8 pfnU128;
    2122     PFNIEMAIMPLMEDIAOPTF2U256IMM8 pfnU256;
    2123 } IEMOPMEDIAOPTF2IMM8;
    2124 /** Pointer to a media operation function table for 2 full sized ops (AVX). */
    2125 typedef IEMOPMEDIAOPTF2IMM8 const *PCIEMOPMEDIAOPTF2IMM8;
    2126 
    2127 /** @def IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX
    2128  * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
    2129  * given functions as initializers.  For use in AVX functions where a pair of
    2130  * functions are only used once and the function table need not be public. */
    2131 #ifndef TST_IEM_CHECK_MC
    2132 # if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    2133 #  define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2134     static IEMOPMEDIAOPTF2IMM8 const s_Host     = { a_pfnHostU128,     a_pfnHostU256 }; \
    2135     static IEMOPMEDIAOPTF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2136 # else
    2137 #  define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2138     static IEMOPMEDIAOPTF2IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2139 # endif
    2140 #else
    2141 # define IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
    2142 #endif
    2143 /** @def IEMOPMEDIAOPTF2IMM8_INIT_VARS
    2144  * Generate AVX function tables for the @a a_InstrNm instruction.
    2145  * @sa IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX */
    2146 #define IEMOPMEDIAOPTF2IMM8_INIT_VARS(a_InstrNm) \
    2147     IEMOPMEDIAOPTF2IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u128),           RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u256),\
    2148                                      RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u128_fallback),  RT_CONCAT3(iemAImpl_,a_InstrNm,_imm_u256_fallback))
    2149 
    2150 /**
    2151  * Function table for a media instruction taking two full sized media source
    2152  * registers and one full sized destination register and an 8-bit immediate, but no additional state
    2153  * (AVX).
    2154  */
    2155 typedef struct IEMOPMEDIAOPTF3IMM8
    2156 {
    2157     PFNIEMAIMPLMEDIAOPTF3U128IMM8 pfnU128;
    2158     PFNIEMAIMPLMEDIAOPTF3U256IMM8 pfnU256;
    2159 } IEMOPMEDIAOPTF3IMM8;
    2160 /** Pointer to a media operation function table for 3 full sized ops (AVX). */
    2161 typedef IEMOPMEDIAOPTF3IMM8 const *PCIEMOPMEDIAOPTF3IMM8;
    2162 
    2163 /** @def IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX
    2164  * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
    2165  * given functions as initializers.  For use in AVX functions where a pair of
    2166  * functions are only used once and the function table need not be public. */
    2167 #ifndef TST_IEM_CHECK_MC
    2168 # if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    2169 #  define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2170     static IEMOPMEDIAOPTF3IMM8 const s_Host     = { a_pfnHostU128,     a_pfnHostU256 }; \
    2171     static IEMOPMEDIAOPTF3IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2172 # else
    2173 #  define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2174     static IEMOPMEDIAOPTF3IMM8 const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2175 # endif
    2176 #else
    2177 # define IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
    2178 #endif
    2179 /** @def IEMOPMEDIAOPTF3IMM8_INIT_VARS
    2180  * Generate AVX function tables for the @a a_InstrNm instruction.
    2181  * @sa IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX */
    2182 #define IEMOPMEDIAOPTF3IMM8_INIT_VARS(a_InstrNm) \
    2183     IEMOPMEDIAOPTF3IMM8_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128),           RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
    2184                                      RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback),  RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
    2185 /** @} */
    2186 
    2187 
    2188 /**
    2189  * Function table for a blend-type instruction taking three full sized media source
    2190  * registers and one full sized destination register, but no additional state
    2191  * (AVX).
    2192  */
    2193 typedef struct IEMOPBLENDOP
    2194 {
    2195     PFNIEMAIMPLAVXBLENDU128 pfnU128;
    2196     PFNIEMAIMPLAVXBLENDU256 pfnU256;
    2197 } IEMOPBLENDOP;
    2198 /** Pointer to a media operation function table for 4 full sized ops (AVX). */
    2199 typedef IEMOPBLENDOP const *PCIEMOPBLENDOP;
    2200 
    2201 /** @def IEMOPBLENDOP_INIT_VARS_EX
    2202  * Declares a s_Host (x86 & amd64 only) and a s_Fallback variable with the
    2203  * given functions as initializers.  For use in AVX functions where a pair of
    2204  * functions are only used once and the function table need not be public. */
    2205 #ifndef TST_IEM_CHECK_MC
    2206 # if (defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)) && !defined(IEM_WITHOUT_ASSEMBLY)
    2207 #  define IEMOPBLENDOP_INIT_VARS_EX(a_pfnHostU128, a_pfnHostU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2208     static IEMOPBLENDOP const s_Host     = { a_pfnHostU128,     a_pfnHostU256 }; \
    2209     static IEMOPBLENDOP const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2210 # else
    2211 #  define IEMOPBLENDOP_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) \
    2212     static IEMOPBLENDOP const s_Fallback = { a_pfnFallbackU128, a_pfnFallbackU256 }
    2213 # endif
    2214 #else
    2215 # define IEMOPBLENDOP_INIT_VARS_EX(a_pfnU128, a_pfnU256, a_pfnFallbackU128, a_pfnFallbackU256) (void)0
    2216 #endif
    2217 /** @def IEMOPBLENDOP_INIT_VARS
    2218  * Generate AVX function tables for the @a a_InstrNm instruction.
    2219  * @sa IEMOPBLENDOP_INIT_VARS_EX */
    2220 #define IEMOPBLENDOP_INIT_VARS(a_InstrNm) \
    2221     IEMOPBLENDOP_INIT_VARS_EX(RT_CONCAT3(iemAImpl_,a_InstrNm,_u128),           RT_CONCAT3(iemAImpl_,a_InstrNm,_u256),\
    2222                               RT_CONCAT3(iemAImpl_,a_InstrNm,_u128_fallback),  RT_CONCAT3(iemAImpl_,a_InstrNm,_u256_fallback))
    2223 
    2224 
    2225 /** @name SSE/AVX single/double precision floating point operations.
    2226  * @{ */
    2227 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
    2228 typedef FNIEMAIMPLFPSSEF2U128  *PFNIEMAIMPLFPSSEF2U128;
    2229 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128R32,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2));
    2230 typedef FNIEMAIMPLFPSSEF2U128R32  *PFNIEMAIMPLFPSSEF2U128R32;
    2231 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPSSEF2U128R64,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2));
    2232 typedef FNIEMAIMPLFPSSEF2U128R64  *PFNIEMAIMPLFPSSEF2U128R64;
    2233 
    2234 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCX86XMMREG puSrc2));
    2235 typedef FNIEMAIMPLFPAVXF3U128  *PFNIEMAIMPLFPAVXF3U128;
    2236 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128R32,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT32U pr32Src2));
    2237 typedef FNIEMAIMPLFPAVXF3U128R32  *PFNIEMAIMPLFPAVXF3U128R32;
    2238 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U128R64,(uint32_t uMxCsrIn, PX86XMMREG pResult, PCX86XMMREG puSrc1, PCRTFLOAT64U pr64Src2));
    2239 typedef FNIEMAIMPLFPAVXF3U128R64  *PFNIEMAIMPLFPAVXF3U128R64;
    2240 
    2241 typedef IEM_DECL_IMPL_TYPE(uint32_t, FNIEMAIMPLFPAVXF3U256,(uint32_t uMxCsrIn, PX86YMMREG pResult, PCX86YMMREG puSrc1, PCX86YMMREG puSrc2));
    2242 typedef FNIEMAIMPLFPAVXF3U256  *PFNIEMAIMPLFPAVXF3U256;
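
All of these worker types share one calling convention: MXCSR comes in by value and the updated value, including any newly raised exception flags, is returned.  A minimal sketch using the addps worker declared just below (variable names are illustrative only):

    X86XMMREG      Res;
    uint32_t const fMxCsrOut = iemAImpl_addps_u128(fMxCsrIn, &Res, &Src1, &Src2);
    /* fMxCsrOut is fMxCsrIn plus whatever exception flags the operation raised. */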
    2243 
    2244 FNIEMAIMPLFPSSEF2U128 iemAImpl_addps_u128;
    2245 FNIEMAIMPLFPSSEF2U128 iemAImpl_addpd_u128;
    2246 FNIEMAIMPLFPSSEF2U128 iemAImpl_mulps_u128;
    2247 FNIEMAIMPLFPSSEF2U128 iemAImpl_mulpd_u128;
    2248 FNIEMAIMPLFPSSEF2U128 iemAImpl_subps_u128;
    2249 FNIEMAIMPLFPSSEF2U128 iemAImpl_subpd_u128;
    2250 FNIEMAIMPLFPSSEF2U128 iemAImpl_minps_u128;
    2251 FNIEMAIMPLFPSSEF2U128 iemAImpl_minpd_u128;
    2252 FNIEMAIMPLFPSSEF2U128 iemAImpl_divps_u128;
    2253 FNIEMAIMPLFPSSEF2U128 iemAImpl_divpd_u128;
    2254 FNIEMAIMPLFPSSEF2U128 iemAImpl_maxps_u128;
    2255 FNIEMAIMPLFPSSEF2U128 iemAImpl_maxpd_u128;
    2256 FNIEMAIMPLFPSSEF2U128 iemAImpl_haddps_u128;
    2257 FNIEMAIMPLFPSSEF2U128 iemAImpl_haddpd_u128;
    2258 FNIEMAIMPLFPSSEF2U128 iemAImpl_hsubps_u128;
    2259 FNIEMAIMPLFPSSEF2U128 iemAImpl_hsubpd_u128;
    2260 FNIEMAIMPLFPSSEF2U128 iemAImpl_sqrtps_u128;
    2261 FNIEMAIMPLFPSSEF2U128 iemAImpl_rsqrtps_u128;
    2262 FNIEMAIMPLFPSSEF2U128 iemAImpl_sqrtpd_u128;
    2263 FNIEMAIMPLFPSSEF2U128 iemAImpl_rcpps_u128;
    2264 FNIEMAIMPLFPSSEF2U128 iemAImpl_addsubps_u128;
    2265 FNIEMAIMPLFPSSEF2U128 iemAImpl_addsubpd_u128;
    2266 
    2267 FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtpd2ps_u128;
    2268 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_cvtps2pd_u128,(uint32_t uMxCsrIn, PX86XMMREG pResult, uint64_t const *pu64Src));
    2269 
    2270 FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtdq2ps_u128;
    2271 FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtps2dq_u128;
    2272 FNIEMAIMPLFPSSEF2U128 iemAImpl_cvttps2dq_u128;
    2273 FNIEMAIMPLFPSSEF2U128 iemAImpl_cvttpd2dq_u128;
    2274 FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtdq2pd_u128;
    2275 FNIEMAIMPLFPSSEF2U128 iemAImpl_cvtpd2dq_u128;
    2276 
    2277 FNIEMAIMPLFPSSEF2U128R32 iemAImpl_addss_u128_r32;
    2278 FNIEMAIMPLFPSSEF2U128R64 iemAImpl_addsd_u128_r64;
    2279 FNIEMAIMPLFPSSEF2U128R32 iemAImpl_mulss_u128_r32;
    2280 FNIEMAIMPLFPSSEF2U128R64 iemAImpl_mulsd_u128_r64;
    2281 FNIEMAIMPLFPSSEF2U128R32 iemAImpl_subss_u128_r32;
    2282 FNIEMAIMPLFPSSEF2U128R64 iemAImpl_subsd_u128_r64;
    2283 FNIEMAIMPLFPSSEF2U128R32 iemAImpl_minss_u128_r32;
    2284 FNIEMAIMPLFPSSEF2U128R64 iemAImpl_minsd_u128_r64;
    2285 FNIEMAIMPLFPSSEF2U128R32 iemAImpl_divss_u128_r32;
    2286 FNIEMAIMPLFPSSEF2U128R64 iemAImpl_divsd_u128_r64;
    2287 FNIEMAIMPLFPSSEF2U128R32 iemAImpl_maxss_u128_r32;
    2288 FNIEMAIMPLFPSSEF2U128R64 iemAImpl_maxsd_u128_r64;
    2289 FNIEMAIMPLFPSSEF2U128R32 iemAImpl_cvtss2sd_u128_r32;
    2290 FNIEMAIMPLFPSSEF2U128R64 iemAImpl_cvtsd2ss_u128_r64;
    2291 FNIEMAIMPLFPSSEF2U128R32 iemAImpl_sqrtss_u128_r32;
    2292 FNIEMAIMPLFPSSEF2U128R64 iemAImpl_sqrtsd_u128_r64;
    2293 FNIEMAIMPLFPSSEF2U128R32 iemAImpl_rsqrtss_u128_r32;
    2294 FNIEMAIMPLFPSSEF2U128R32 iemAImpl_rcpss_u128_r32;
    2295 
    2296 FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddps_u128, iemAImpl_vaddps_u128_fallback;
    2297 FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddpd_u128, iemAImpl_vaddpd_u128_fallback;
    2298 FNIEMAIMPLMEDIAF3U128 iemAImpl_vmulps_u128, iemAImpl_vmulps_u128_fallback;
    2299 FNIEMAIMPLMEDIAF3U128 iemAImpl_vmulpd_u128, iemAImpl_vmulpd_u128_fallback;
    2300 FNIEMAIMPLMEDIAF3U128 iemAImpl_vsubps_u128, iemAImpl_vsubps_u128_fallback;
    2301 FNIEMAIMPLMEDIAF3U128 iemAImpl_vsubpd_u128, iemAImpl_vsubpd_u128_fallback;
    2302 FNIEMAIMPLMEDIAF3U128 iemAImpl_vminps_u128, iemAImpl_vminps_u128_fallback;
    2303 FNIEMAIMPLMEDIAF3U128 iemAImpl_vminpd_u128, iemAImpl_vminpd_u128_fallback;
    2304 FNIEMAIMPLMEDIAF3U128 iemAImpl_vdivps_u128, iemAImpl_vdivps_u128_fallback;
    2305 FNIEMAIMPLMEDIAF3U128 iemAImpl_vdivpd_u128, iemAImpl_vdivpd_u128_fallback;
    2306 FNIEMAIMPLMEDIAF3U128 iemAImpl_vmaxps_u128, iemAImpl_vmaxps_u128_fallback;
    2307 FNIEMAIMPLMEDIAF3U128 iemAImpl_vmaxpd_u128, iemAImpl_vmaxpd_u128_fallback;
    2308 FNIEMAIMPLMEDIAF3U128 iemAImpl_vhaddps_u128, iemAImpl_vhaddps_u128_fallback;
    2309 FNIEMAIMPLMEDIAF3U128 iemAImpl_vhaddpd_u128, iemAImpl_vhaddpd_u128_fallback;
    2310 FNIEMAIMPLMEDIAF3U128 iemAImpl_vhsubps_u128, iemAImpl_vhsubps_u128_fallback;
    2311 FNIEMAIMPLMEDIAF3U128 iemAImpl_vhsubpd_u128, iemAImpl_vhsubpd_u128_fallback;
    2312 FNIEMAIMPLMEDIAF2U128 iemAImpl_vsqrtps_u128, iemAImpl_vsqrtps_u128_fallback;
    2313 FNIEMAIMPLMEDIAF2U128 iemAImpl_vsqrtpd_u128, iemAImpl_vsqrtpd_u128_fallback;
    2314 FNIEMAIMPLMEDIAF2U128 iemAImpl_vrsqrtps_u128,  iemAImpl_vrsqrtps_u128_fallback;
    2315 FNIEMAIMPLMEDIAF2U128 iemAImpl_vrcpps_u128,    iemAImpl_vrcpps_u128_fallback;
    2316 FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddsubps_u128, iemAImpl_vaddsubps_u128_fallback;
    2317 FNIEMAIMPLMEDIAF3U128 iemAImpl_vaddsubpd_u128, iemAImpl_vaddsubpd_u128_fallback;
    2318 FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvtdq2ps_u128, iemAImpl_vcvtdq2ps_u128_fallback;
    2319 FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvtps2dq_u128, iemAImpl_vcvtps2dq_u128_fallback;
    2320 FNIEMAIMPLMEDIAF2U128 iemAImpl_vcvttps2dq_u128, iemAImpl_vcvttps2dq_u128_fallback;
    2321 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
    2322 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
    2323 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
    2324 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
    2325 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u128,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
    2326 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u128_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86XMMREG puSrc));
    2327 
    2328 
    2329 FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vaddss_u128_r32, iemAImpl_vaddss_u128_r32_fallback;
    2330 FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vaddsd_u128_r64, iemAImpl_vaddsd_u128_r64_fallback;
    2331 FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vmulss_u128_r32, iemAImpl_vmulss_u128_r32_fallback;
    2332 FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vmulsd_u128_r64, iemAImpl_vmulsd_u128_r64_fallback;
    2333 FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vsubss_u128_r32, iemAImpl_vsubss_u128_r32_fallback;
    2334 FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vsubsd_u128_r64, iemAImpl_vsubsd_u128_r64_fallback;
    2335 FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vminss_u128_r32, iemAImpl_vminss_u128_r32_fallback;
    2336 FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vminsd_u128_r64, iemAImpl_vminsd_u128_r64_fallback;
    2337 FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vdivss_u128_r32, iemAImpl_vdivss_u128_r32_fallback;
    2338 FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vdivsd_u128_r64, iemAImpl_vdivsd_u128_r64_fallback;
    2339 FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vmaxss_u128_r32, iemAImpl_vmaxss_u128_r32_fallback;
    2340 FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vmaxsd_u128_r64, iemAImpl_vmaxsd_u128_r64_fallback;
    2341 FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vsqrtss_u128_r32, iemAImpl_vsqrtss_u128_r32_fallback;
    2342 FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vsqrtsd_u128_r64, iemAImpl_vsqrtsd_u128_r64_fallback;
    2343 FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vrsqrtss_u128_r32, iemAImpl_vrsqrtss_u128_r32_fallback;
    2344 FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vrcpss_u128_r32,   iemAImpl_vrcpss_u128_r32_fallback;
    2345 FNIEMAIMPLFPAVXF3U128R32 iemAImpl_vcvtss2sd_u128_r32, iemAImpl_vcvtss2sd_u128_r32_fallback;
    2346 FNIEMAIMPLFPAVXF3U128R64 iemAImpl_vcvtsd2ss_u128_r64, iemAImpl_vcvtsd2ss_u128_r64_fallback;
    2347 
    2348 
    2349 FNIEMAIMPLFPAVXF3U256 iemAImpl_vaddps_u256, iemAImpl_vaddps_u256_fallback;
    2350 FNIEMAIMPLFPAVXF3U256 iemAImpl_vaddpd_u256, iemAImpl_vaddpd_u256_fallback;
    2351 FNIEMAIMPLFPAVXF3U256 iemAImpl_vmulps_u256, iemAImpl_vmulps_u256_fallback;
    2352 FNIEMAIMPLFPAVXF3U256 iemAImpl_vmulpd_u256, iemAImpl_vmulpd_u256_fallback;
    2353 FNIEMAIMPLFPAVXF3U256 iemAImpl_vsubps_u256, iemAImpl_vsubps_u256_fallback;
    2354 FNIEMAIMPLFPAVXF3U256 iemAImpl_vsubpd_u256, iemAImpl_vsubpd_u256_fallback;
    2355 FNIEMAIMPLFPAVXF3U256 iemAImpl_vminps_u256, iemAImpl_vminps_u256_fallback;
    2356 FNIEMAIMPLFPAVXF3U256 iemAImpl_vminpd_u256, iemAImpl_vminpd_u256_fallback;
    2357 FNIEMAIMPLFPAVXF3U256 iemAImpl_vdivps_u256, iemAImpl_vdivps_u256_fallback;
    2358 FNIEMAIMPLFPAVXF3U256 iemAImpl_vdivpd_u256, iemAImpl_vdivpd_u256_fallback;
    2359 FNIEMAIMPLFPAVXF3U256 iemAImpl_vmaxps_u256, iemAImpl_vmaxps_u256_fallback;
    2360 FNIEMAIMPLFPAVXF3U256 iemAImpl_vmaxpd_u256, iemAImpl_vmaxpd_u256_fallback;
    2361 FNIEMAIMPLFPAVXF3U256 iemAImpl_vhaddps_u256, iemAImpl_vhaddps_u256_fallback;
    2362 FNIEMAIMPLFPAVXF3U256 iemAImpl_vhaddpd_u256, iemAImpl_vhaddpd_u256_fallback;
    2363 FNIEMAIMPLFPAVXF3U256 iemAImpl_vhsubps_u256, iemAImpl_vhsubps_u256_fallback;
    2364 FNIEMAIMPLFPAVXF3U256 iemAImpl_vhsubpd_u256, iemAImpl_vhsubpd_u256_fallback;
    2365 FNIEMAIMPLMEDIAF3U256 iemAImpl_vaddsubps_u256, iemAImpl_vaddsubps_u256_fallback;
    2366 FNIEMAIMPLMEDIAF3U256 iemAImpl_vaddsubpd_u256, iemAImpl_vaddsubpd_u256_fallback;
    2367 FNIEMAIMPLMEDIAF2U256 iemAImpl_vsqrtps_u256, iemAImpl_vsqrtps_u256_fallback;
    2368 FNIEMAIMPLMEDIAF2U256 iemAImpl_vsqrtpd_u256, iemAImpl_vsqrtpd_u256_fallback;
    2369 FNIEMAIMPLMEDIAF2U256 iemAImpl_vrsqrtps_u256,  iemAImpl_vrsqrtps_u256_fallback;
    2370 FNIEMAIMPLMEDIAF2U256 iemAImpl_vrcpps_u256,    iemAImpl_vrcpps_u256_fallback;
    2371 FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvtdq2ps_u256,  iemAImpl_vcvtdq2ps_u256_fallback;
    2372 FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvtps2dq_u256,  iemAImpl_vcvtps2dq_u256_fallback;
    2373 FNIEMAIMPLMEDIAF2U256 iemAImpl_vcvttps2dq_u256, iemAImpl_vcvttps2dq_u256_fallback;
    2374 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
    2375 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2ps_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
    2376 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
    2377 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvttpd2dq_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
    2378 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u256,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
    2379 IEM_DECL_IMPL_PROTO(uint32_t, iemAImpl_vcvtpd2dq_u128_u256_fallback,(uint32_t uMxCsrIn, PX86XMMREG puDst, PCX86YMMREG puSrc));
    2380 /** @} */
    2381 
    2382 
    2383 47 /** @name Misc Helpers
    2384 48  * @{  */
    2385 49 
    2386 /** @def IEM_GET_INSTR_LEN
    2387  * Gets the instruction length. */
    2388 #ifdef IEM_WITH_CODE_TLB
    2389 # define IEM_GET_INSTR_LEN(a_pVCpu)         ((a_pVCpu)->iem.s.offInstrNextByte - (uint32_t)(int32_t)(a_pVCpu)->iem.s.offCurInstrStart)
    2390 #else
    2391 # define IEM_GET_INSTR_LEN(a_pVCpu)         ((a_pVCpu)->iem.s.offOpcode)
    2392 #endif
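
A typical use, sketched: the decoded length feeds RIP advancing and intercept reporting.

    uint8_t const cbInstr = IEM_GET_INSTR_LEN(pVCpu); /* length of the instruction just decoded */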
    2393 
    2394 /**
    2395  * Gets the CPU mode (from fExec) as an IEMMODE value.
    2396  *
    2397  * @returns IEMMODE
    2398  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2399  */
    2400 #define IEM_GET_CPU_MODE(a_pVCpu)           ((a_pVCpu)->iem.s.fExec & IEM_F_MODE_X86_CPUMODE_MASK)
    2401 
    2402 /**
    2403  * Check if we're currently executing in real or virtual 8086 mode.
    2404  *
    2405  * @returns @c true if it is, @c false if not.
    2406  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2407  */
    2408 #define IEM_IS_REAL_OR_V86_MODE(a_pVCpu)    ((  ((a_pVCpu)->iem.s.fExec  ^ IEM_F_MODE_X86_PROT_MASK) \
    2409                                               & (IEM_F_MODE_X86_V86_MASK | IEM_F_MODE_X86_PROT_MASK)) != 0)
    2410 
    2411 /**
    2412  * Check if we're currently executing in virtual 8086 mode.
    2413  *
    2414  * @returns @c true if it is, @c false if not.
    2415  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2416  */
    2417 #define IEM_IS_V86_MODE(a_pVCpu)            (((a_pVCpu)->iem.s.fExec & IEM_F_MODE_X86_V86_MASK) != 0)
    2418 
    2419 /**
    2420  * Check if we're currently executing in long mode.
    2421  *
    2422  * @returns @c true if it is, @c false if not.
    2423  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2424  */
    2425 #define IEM_IS_LONG_MODE(a_pVCpu)           (CPUMIsGuestInLongModeEx(IEM_GET_CTX(a_pVCpu)))
    2426 
    2427 /**
    2428  * Check if we're currently executing in a 16-bit code segment.
    2429  *
    2430  * @returns @c true if it is, @c false if not.
    2431  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2432  */
    2433 #define IEM_IS_16BIT_CODE(a_pVCpu)          (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_16BIT)
    2434 
    2435 /**
    2436  * Check if we're currently executing in a 32-bit code segment.
    2437  *
    2438  * @returns @c true if it is, @c false if not.
    2439  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2440  */
    2441 #define IEM_IS_32BIT_CODE(a_pVCpu)          (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_32BIT)
    2442 
    2443 /**
    2444  * Check if we're currently executing in a 64-bit code segment.
    2445  *
    2446  * @returns @c true if it is, @c false if not.
    2447  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2448  */
    2449 #define IEM_IS_64BIT_CODE(a_pVCpu)          (IEM_GET_CPU_MODE(a_pVCpu) == IEMMODE_64BIT)
    2450 
    2451 /**
    2452  * Check if we're currently executing in real mode.
    2453  *
    2454  * @returns @c true if it is, @c false if not.
    2455  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2456  */
    2457 #define IEM_IS_REAL_MODE(a_pVCpu)           (!((a_pVCpu)->iem.s.fExec & IEM_F_MODE_X86_PROT_MASK))
    2458 
    2459 /**
    2460  * Gets the current protection level (CPL).
    2461  *
    2462  * @returns 0..3
    2463  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2464  */
    2465 #define IEM_GET_CPL(a_pVCpu)                (((a_pVCpu)->iem.s.fExec >> IEM_F_X86_CPL_SHIFT) & IEM_F_X86_CPL_SMASK)
    2466 
    2467 /**
    2468  * Sets the current protection level (CPL).
    2469  *
    2470  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2471  */
    2472 #define IEM_SET_CPL(a_pVCpu, a_uCpl) \
    2473     do { (a_pVCpu)->iem.s.fExec = ((a_pVCpu)->iem.s.fExec & ~IEM_F_X86_CPL_MASK) | ((a_uCpl) << IEM_F_X86_CPL_SHIFT); } while (0)
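
A round-trip sketch with hypothetical values, showing that the CPL lives in the fExec bit field:

    uint8_t const uOldCpl = IEM_GET_CPL(pVCpu);  /* extract the CPL bits from fExec */
    IEM_SET_CPL(pVCpu, 3);                       /* e.g. an IRET dropping to ring-3 */
    Assert(IEM_GET_CPL(pVCpu) == 3);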
    2474 
    2475 /**
    2476  * Returns a (const) pointer to the CPUMFEATURES for the guest CPU.
    2477  * @returns PCCPUMFEATURES
    2478  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2479  */
    2480 #define IEM_GET_GUEST_CPU_FEATURES(a_pVCpu) (&((a_pVCpu)->CTX_SUFF(pVM)->cpum.ro.GuestFeatures))
    2481 
    2482 /**
    2483  * Returns a (const) pointer to the CPUMFEATURES for the host CPU.
    2484  * @returns PCCPUMFEATURES
    2485  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2486  */
    2487 #define IEM_GET_HOST_CPU_FEATURES(a_pVCpu)  (&g_CpumHostFeatures.s)
    2488 
    2489 /**
    2490  * Evaluates to true if we're presenting an Intel CPU to the guest.
    2491  */
    2492 #define IEM_IS_GUEST_CPU_INTEL(a_pVCpu)     ( (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL )
    2493 
    2494 /**
    2495  * Evaluates to true if we're presenting an AMD CPU to the guest.
    2496  */
    2497 #define IEM_IS_GUEST_CPU_AMD(a_pVCpu)       ( (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_AMD || (a_pVCpu)->iem.s.enmCpuVendor == CPUMCPUVENDOR_HYGON )
    2498 
    2499 /**
    2500  * Check if the address is canonical.
    2501  */
    2502 #define IEM_IS_CANONICAL(a_u64Addr)         X86_IS_CANONICAL(a_u64Addr)
    2503 
    2504 /** Checks if the ModR/M byte is in register mode or not.  */
    2505 #define IEM_IS_MODRM_REG_MODE(a_bRm)        ( ((a_bRm) & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
    2506 /** Checks if the ModR/M byte is in memory mode or not.  */
    2507 #define IEM_IS_MODRM_MEM_MODE(a_bRm)        ( ((a_bRm) & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT) )
    2508 
    2509 /**
    2510  * Gets the register (reg) part of a ModR/M encoding, with REX.R added in.
    2511  *
    2512  * For use during decoding.
    2513  */
    2514 #define IEM_GET_MODRM_REG(a_pVCpu, a_bRm)   ( (((a_bRm) >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | (a_pVCpu)->iem.s.uRexReg )
    2515 /**
    2516  * Gets the r/m part of a ModR/M encoding as a register index, with REX.B added in.
    2517  *
    2518  * For use during decoding.
    2519  */
    2520 #define IEM_GET_MODRM_RM(a_pVCpu, a_bRm)    ( ((a_bRm) & X86_MODRM_RM_MASK) | (a_pVCpu)->iem.s.uRexB )
    2521 
    2522 /**
    2523  * Gets the register (reg) part of a ModR/M encoding, without REX.R.
    2524  *
    2525  * For use during decoding.
    2526  */
    2527 #define IEM_GET_MODRM_REG_8(a_bRm)          ( (((a_bRm) >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) )
    2528 /**
    2529  * Gets the r/m part of a ModR/M encoding as a register index, without REX.B.
    2530  *
    2531  * For use during decoding.
    2532  */
    2533 #define IEM_GET_MODRM_RM_8(a_bRm)           ( ((a_bRm) & X86_MODRM_RM_MASK) )
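
A worked example may help: for bRm = 0xC8, i.e. binary 11 001 000:

    /* mod = 3 -> IEM_IS_MODRM_REG_MODE(0xC8) is true (register operand);
       reg = 1 -> IEM_GET_MODRM_REG_8(0xC8) == 1 (rCX, before REX.R);
       r/m = 0 -> IEM_GET_MODRM_RM_8(0xC8)  == 0 (rAX, before REX.B). */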
    2534 
    2535 /**
    2536  * Gets the register (reg) part of a ModR/M encoding as an extended 8-bit
    2537  * register index, with REX.R added in.
    2538  *
    2539  * For use during decoding.
    2540  *
    2541  * @see iemGRegRefU8Ex, iemGRegFetchU8Ex, iemGRegStoreU8Ex
    2542  */
    2543 #define IEM_GET_MODRM_REG_EX8(a_pVCpu, a_bRm) \
    2544     (   ((a_pVCpu)->iem.s.fPrefixes & IEM_OP_PRF_REX) \
    2545      || !((a_bRm) & (4 << X86_MODRM_REG_SHIFT)) /* IEM_GET_MODRM_REG(a_pVCpu, a_bRm) < 4 */ \
    2546      ? IEM_GET_MODRM_REG(a_pVCpu, a_bRm) : (((a_bRm) >> X86_MODRM_REG_SHIFT) & 3) | 16)
    2547 /**
    2548  * Gets the r/m part of a ModR/M encoding as an extended 8-bit register index,
    2549  * with REX.B added in.
    2550  *
    2551  * For use during decoding.
    2552  *
    2553  * @see iemGRegRefU8Ex, iemGRegFetchU8Ex, iemGRegStoreU8Ex
    2554  */
    2555 #define IEM_GET_MODRM_RM_EX8(a_pVCpu, a_bRm) \
    2556     (   ((a_pVCpu)->iem.s.fPrefixes & IEM_OP_PRF_REX) \
    2557      || !((a_bRm) & 4) /* IEM_GET_MODRM_RM(a_pVCpu, a_bRm) < 4 */ \
    2558      ? IEM_GET_MODRM_RM(a_pVCpu, a_bRm) : ((a_bRm) & 3) | 16)
    2559 
    2560 /**
    2561  * Combines the REX prefix and ModR/M byte for passing to
    2562  * iemOpHlpCalcRmEffAddrThreadedAddr64().
    2563  *
    2564  * @returns The ModRM byte but with bit 3 set to REX.B and bit 4 to REX.X.
    2565  *          The two bits are part of the REG sub-field, which isn't needed in
    2566  *          iemOpHlpCalcRmEffAddrThreadedAddr64().
    2567  *
    2568  * For use during decoding/recompiling.
    2569  */
    2570 #define IEM_GET_MODRM_EX(a_pVCpu, a_bRm) \
    2571     (  ((a_bRm) & ~X86_MODRM_REG_MASK) \
    2572      | (uint8_t)( ((a_pVCpu)->iem.s.fPrefixes & (IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X)) >> (25 - 3) ) )
    2573 AssertCompile(IEM_OP_PRF_REX_B == RT_BIT_32(25));
    2574 AssertCompile(IEM_OP_PRF_REX_X == RT_BIT_32(26));
    2575 
    2576 /**
    2577  * Gets the effective VEX.VVVV value.
    2578  *
    2579  * The 4th bit is ignored if not 64-bit code.
    2580  * @returns effective V-register value.
    2581  * @param   a_pVCpu         The cross context virtual CPU structure of the calling thread.
    2582  */
    2583 #define IEM_GET_EFFECTIVE_VVVV(a_pVCpu) \
    2584     (IEM_IS_64BIT_CODE(a_pVCpu) ? (a_pVCpu)->iem.s.uVex3rdReg : (a_pVCpu)->iem.s.uVex3rdReg & 7)
    2585 
    2586 
    2587 /**
    2588  * Gets the register (reg) part of the special 4th register byte used by
    2589  * vblendvps and vblendvpd.
    2590  *
    2591  * For use during decoding.
    2592  */
    2593 #define IEM_GET_IMM8_REG(a_pVCpu, a_bRegImm8) \
    2594     (IEM_IS_64BIT_CODE(a_pVCpu) ? (a_bRegImm8) >> 4 : ((a_bRegImm8) >> 4) & 7)
    2595 
    2596 
    2597 /**
    2598  * Checks if we're executing inside an AMD-V or VT-x guest.
    2599  */
    2600 #if defined(VBOX_WITH_NESTED_HWVIRT_VMX) || defined(VBOX_WITH_NESTED_HWVIRT_SVM)
    2601 # define IEM_IS_IN_GUEST(a_pVCpu)       RT_BOOL((a_pVCpu)->iem.s.fExec & IEM_F_X86_CTX_IN_GUEST)
    2602 #else
    2603 # define IEM_IS_IN_GUEST(a_pVCpu)       false
    2604 #endif
    2605 
    2606 
    2607 #ifdef VBOX_WITH_NESTED_HWVIRT_VMX
    2608 
    2609 /**
    2610  * Check if the guest has entered VMX root operation.
    2611  */
    2612 # define IEM_VMX_IS_ROOT_MODE(a_pVCpu)      (CPUMIsGuestInVmxRootMode(IEM_GET_CTX(a_pVCpu)))
    2613 
    2614 /**
    2615  * Check if the guest has entered VMX non-root operation.
    2616  */
    2617 # define IEM_VMX_IS_NON_ROOT_MODE(a_pVCpu)  (   ((a_pVCpu)->iem.s.fExec & (IEM_F_X86_CTX_VMX | IEM_F_X86_CTX_IN_GUEST)) \
    2618                                              ==                           (IEM_F_X86_CTX_VMX | IEM_F_X86_CTX_IN_GUEST) )
    2619 
    2620 /**
    2621  * Check if the nested-guest has the given Pin-based VM-execution control set.
    2622  */
    2623 # define IEM_VMX_IS_PINCTLS_SET(a_pVCpu, a_PinCtl)  (CPUMIsGuestVmxPinCtlsSet(IEM_GET_CTX(a_pVCpu), (a_PinCtl)))
    2624 
    2625 /**
    2626  * Check if the nested-guest has the given Processor-based VM-execution control set.
    2627  */
    2628 # define IEM_VMX_IS_PROCCTLS_SET(a_pVCpu, a_ProcCtl) (CPUMIsGuestVmxProcCtlsSet(IEM_GET_CTX(a_pVCpu), (a_ProcCtl)))
    2629 
    2630 /**
    2631  * Check if the nested-guest has the given Secondary Processor-based VM-execution
    2632  * control set.
    2633  */
    2634 # define IEM_VMX_IS_PROCCTLS2_SET(a_pVCpu, a_ProcCtl2) (CPUMIsGuestVmxProcCtls2Set(IEM_GET_CTX(a_pVCpu), (a_ProcCtl2)))
    2635 
    2636 /** Gets the guest-physical address of the shadow VMCS for the given VCPU. */
    2637 # define IEM_VMX_GET_SHADOW_VMCS(a_pVCpu)           ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysShadowVmcs)
    2638 
    2639 /** Whether a shadow VMCS is present for the given VCPU. */
    2640 # define IEM_VMX_HAS_SHADOW_VMCS(a_pVCpu)           RT_BOOL(IEM_VMX_GET_SHADOW_VMCS(a_pVCpu) != NIL_RTGCPHYS)
    2641 
    2642 /** Gets the VMXON region pointer. */
    2643 # define IEM_VMX_GET_VMXON_PTR(a_pVCpu)             ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmxon)
    2644 
    2645 /** Gets the guest-physical address of the current VMCS for the given VCPU. */
    2646 # define IEM_VMX_GET_CURRENT_VMCS(a_pVCpu)          ((a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs)
    2647 
    2648 /** Whether a current VMCS is present for the given VCPU. */
    2649 # define IEM_VMX_HAS_CURRENT_VMCS(a_pVCpu)          RT_BOOL(IEM_VMX_GET_CURRENT_VMCS(a_pVCpu) != NIL_RTGCPHYS)
    2650 
    2651 /** Assigns the guest-physical address of the current VMCS for the given VCPU. */
    2652 # define IEM_VMX_SET_CURRENT_VMCS(a_pVCpu, a_GCPhysVmcs) \
    2653     do \
    2654     { \
    2655         Assert((a_GCPhysVmcs) != NIL_RTGCPHYS); \
    2656         (a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs = (a_GCPhysVmcs); \
    2657     } while (0)
    2658 
    2659 /** Clears any current VMCS for the given VCPU. */
    2660 # define IEM_VMX_CLEAR_CURRENT_VMCS(a_pVCpu) \
    2661     do \
    2662     { \
    2663         (a_pVCpu)->cpum.GstCtx.hwvirt.vmx.GCPhysVmcs = NIL_RTGCPHYS; \
    2664     } while (0)
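
A sketch of how a VMPTRLD-style flow might use the pair above (hypothetical flow, not the actual implementation):

    if (   !IEM_VMX_HAS_CURRENT_VMCS(pVCpu)
        || IEM_VMX_GET_CURRENT_VMCS(pVCpu) != GCPhysVmcs)
        IEM_VMX_SET_CURRENT_VMCS(pVCpu, GCPhysVmcs);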
    2665 
    2666 /**
    2667  * Invokes the VMX VM-exit handler for an instruction intercept.
    2668  */
    2669 # define IEM_VMX_VMEXIT_INSTR_RET(a_pVCpu, a_uExitReason, a_cbInstr) \
    2670     do { return iemVmxVmexitInstr((a_pVCpu), (a_uExitReason), (a_cbInstr)); } while (0)
    2671 
    2672 /**
    2673  * Invokes the VMX VM-exit handler for an instruction intercept where the
    2674  * instruction provides additional VM-exit information.
    2675  */
    2676 # define IEM_VMX_VMEXIT_INSTR_NEEDS_INFO_RET(a_pVCpu, a_uExitReason, a_uInstrId, a_cbInstr) \
    2677     do { return iemVmxVmexitInstrNeedsInfo((a_pVCpu), (a_uExitReason), (a_uInstrId), (a_cbInstr)); } while (0)
    2678 
    2679 /**
    2680  * Invokes the VMX VM-exit handler for a task switch.
    2681  */
    2682 # define IEM_VMX_VMEXIT_TASK_SWITCH_RET(a_pVCpu, a_enmTaskSwitch, a_SelNewTss, a_cbInstr) \
    2683     do { return iemVmxVmexitTaskSwitch((a_pVCpu), (a_enmTaskSwitch), (a_SelNewTss), (a_cbInstr)); } while (0)
    2684 
    2685 /**
    2686  * Invokes the VMX VM-exit handler for MWAIT.
    2687  */
    2688 # define IEM_VMX_VMEXIT_MWAIT_RET(a_pVCpu, a_fMonitorArmed, a_cbInstr) \
    2689     do { return iemVmxVmexitInstrMwait((a_pVCpu), (a_fMonitorArmed), (a_cbInstr)); } while (0)
    2690 
    2691 /**
    2692  * Invokes the VMX VM-exit handler for EPT faults.
    2693  */
    2694 # define IEM_VMX_VMEXIT_EPT_RET(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr) \
    2695     do { return iemVmxVmexitEpt(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr); } while (0)
    2696 
    2697 /**
    2698  * Invokes the VMX VM-exit handler for a triple fault.
    2699  */
    2700 # define IEM_VMX_VMEXIT_TRIPLE_FAULT_RET(a_pVCpu, a_uExitReason, a_uExitQual) \
    2701     do { return iemVmxVmexit((a_pVCpu), (a_uExitReason), (a_uExitQual)); } while (0)
    2702 
    2703 #else
    2704 # define IEM_VMX_IS_ROOT_MODE(a_pVCpu)                                          (false)
    2705 # define IEM_VMX_IS_NON_ROOT_MODE(a_pVCpu)                                      (false)
    2706 # define IEM_VMX_IS_PINCTLS_SET(a_pVCpu, a_cbInstr)                             (false)
    2707 # define IEM_VMX_IS_PROCCTLS_SET(a_pVCpu, a_cbInstr)                            (false)
    2708 # define IEM_VMX_IS_PROCCTLS2_SET(a_pVCpu, a_cbInstr)                           (false)
    2709 # define IEM_VMX_VMEXIT_INSTR_RET(a_pVCpu, a_uExitReason, a_cbInstr)            do { return VERR_VMX_IPE_1; } while (0)
    2710 # define IEM_VMX_VMEXIT_INSTR_NEEDS_INFO_RET(a_pVCpu, a_uExitReason, a_uInstrId, a_cbInstr)  do { return VERR_VMX_IPE_1; } while (0)
    2711 # define IEM_VMX_VMEXIT_TASK_SWITCH_RET(a_pVCpu, a_enmTaskSwitch, a_SelNewTss, a_cbInstr)    do { return VERR_VMX_IPE_1; } while (0)
    2712 # define IEM_VMX_VMEXIT_MWAIT_RET(a_pVCpu, a_fMonitorArmed, a_cbInstr)          do { return VERR_VMX_IPE_1; } while (0)
    2713 # define IEM_VMX_VMEXIT_EPT_RET(a_pVCpu, a_pPtWalk, a_fAccess, a_fSlatFail, a_cbInstr)       do { return VERR_VMX_IPE_1; } while (0)
    2714 # define IEM_VMX_VMEXIT_TRIPLE_FAULT_RET(a_pVCpu, a_uExitReason, a_uExitQual)   do { return VERR_VMX_IPE_1; } while (0)
    2715 
    2716 #endif
    2717 
    2718 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM
    2719 /**
    2720  * Checks if we're executing a guest using AMD-V.
    2721  */
    2722 # define IEM_SVM_IS_IN_GUEST(a_pVCpu) (   (a_pVCpu->iem.s.fExec & (IEM_F_X86_CTX_SVM | IEM_F_X86_CTX_IN_GUEST)) \
    2723                                        ==                         (IEM_F_X86_CTX_SVM | IEM_F_X86_CTX_IN_GUEST))
    2724 /**
    2725  * Check if an SVM control/instruction intercept is set.
    2726  */
    2727 # define IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept) \
    2728     (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmCtrlInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_Intercept)))
    2729 
    2730 /**
    2731  * Check if an SVM read CRx intercept is set.
    2732  */
    2733 # define IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, a_uCr) \
    2734     (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmReadCRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uCr)))
    2735 
    2736 /**
    2737  * Check if an SVM write CRx intercept is set.
    2738  */
    2739 # define IEM_SVM_IS_WRITE_CR_INTERCEPT_SET(a_pVCpu, a_uCr) \
    2740     (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmWriteCRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uCr)))
    2741 
    2742 /**
    2743  * Check if an SVM read DRx intercept is set.
    2744  */
    2745 # define IEM_SVM_IS_READ_DR_INTERCEPT_SET(a_pVCpu, a_uDr) \
    2746     (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmReadDRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uDr)))
    2747 
    2748 /**
    2749  * Check if an SVM write DRx intercept is set.
    2750  */
    2751 # define IEM_SVM_IS_WRITE_DR_INTERCEPT_SET(a_pVCpu, a_uDr) \
    2752     (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmWriteDRxInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uDr)))
    2753 
    2754 /**
    2755  * Check if an SVM exception intercept is set.
    2756  */
    2757 # define IEM_SVM_IS_XCPT_INTERCEPT_SET(a_pVCpu, a_uVector) \
    2758     (IEM_SVM_IS_IN_GUEST(a_pVCpu) && CPUMIsGuestSvmXcptInterceptSet(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_uVector)))
    2759 
    2760 /**
    2761  * Invokes the SVM \#VMEXIT handler for the nested-guest.
    2762  */
    2763 # define IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2) \
    2764     do { return iemSvmVmexit((a_pVCpu), (a_uExitCode), (a_uExitInfo1), (a_uExitInfo2)); } while (0)
    2765 
    2766 /**
    2767  * Invokes the 'MOV CRx' SVM \#VMEXIT handler after constructing the
    2768  * corresponding decode assist information.
    2769  */
    2770 # define IEM_SVM_CRX_VMEXIT_RET(a_pVCpu, a_uExitCode, a_enmAccessCrX, a_iGReg) \
    2771     do \
    2772     { \
    2773         uint64_t uExitInfo1; \
    2774         if (   IEM_GET_GUEST_CPU_FEATURES(a_pVCpu)->fSvmDecodeAssists \
    2775             && (a_enmAccessCrX) == IEMACCESSCRX_MOV_CRX) \
    2776             uExitInfo1 = SVM_EXIT1_MOV_CRX_MASK | ((a_iGReg) & 7); \
    2777         else \
    2778             uExitInfo1 = 0; \
    2779         IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, uExitInfo1, 0); \
    2780     } while (0)
    2781 
    2782 /** Checks and handles the SVM nested-guest instruction intercept, updating
    2783  *  the NRIP if needed.
    2784  */
    2785 # define IEM_SVM_CHECK_INSTR_INTERCEPT(a_pVCpu, a_Intercept, a_uExitCode, a_uExitInfo1, a_uExitInfo2, a_cbInstr) \
    2786     do \
    2787     { \
    2788         if (IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept)) \
    2789         { \
    2790             IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr); \
    2791             IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2); \
    2792         } \
    2793     } while (0)
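
The typical call-site shape, sketched for CPUID (the intercept and exit-code names follow the SVM naming scheme but are assumptions here):

    IEM_SVM_CHECK_INSTR_INTERCEPT(pVCpu, SVM_CTRL_INTERCEPT_CPUID, SVM_EXIT_CPUID,
                                  0 /*uExitInfo1*/, 0 /*uExitInfo2*/, cbInstr);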
    2794 
    2795 /** Checks and handles SVM nested-guest CR0 read intercept. */
    2796 # define IEM_SVM_CHECK_READ_CR0_INTERCEPT(a_pVCpu, a_uExitInfo1, a_uExitInfo2, a_cbInstr) \
    2797     do \
    2798     { \
    2799         if (!IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, 0)) \
    2800         { /* probably likely */ } \
    2801         else \
    2802         { \
    2803             IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr); \
    2804             IEM_SVM_VMEXIT_RET(a_pVCpu, SVM_EXIT_READ_CR0, a_uExitInfo1, a_uExitInfo2); \
    2805         } \
    2806     } while (0)
    2807 
    2808 /**
    2809  * Updates the NextRIP (NRIP) field in the nested-guest VMCB.
    2810  */
    2811 # define IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr) \
    2812     do { \
    2813         if (IEM_GET_GUEST_CPU_FEATURES(a_pVCpu)->fSvmNextRipSave) \
    2814             CPUMGuestSvmUpdateNRip(a_pVCpu, IEM_GET_CTX(a_pVCpu), (a_cbInstr)); \
    2815     } while (0)
    2816 
    2817 #else
    2818 # define IEM_SVM_IS_CTRL_INTERCEPT_SET(a_pVCpu, a_Intercept)                                (false)
    2819 # define IEM_SVM_IS_READ_CR_INTERCEPT_SET(a_pVCpu, a_uCr)                                   (false)
    2820 # define IEM_SVM_IS_WRITE_CR_INTERCEPT_SET(a_pVCpu, a_uCr)                                  (false)
    2821 # define IEM_SVM_IS_READ_DR_INTERCEPT_SET(a_pVCpu, a_uDr)                                   (false)
    2822 # define IEM_SVM_IS_WRITE_DR_INTERCEPT_SET(a_pVCpu, a_uDr)                                  (false)
    2823 # define IEM_SVM_IS_XCPT_INTERCEPT_SET(a_pVCpu, a_uVector)                                  (false)
    2824 # define IEM_SVM_VMEXIT_RET(a_pVCpu, a_uExitCode, a_uExitInfo1, a_uExitInfo2)               do { return VERR_SVM_IPE_1; } while (0)
    2825 # define IEM_SVM_CRX_VMEXIT_RET(a_pVCpu, a_uExitCode, a_enmAccessCrX, a_iGReg)              do { return VERR_SVM_IPE_1; } while (0)
    2826 # define IEM_SVM_CHECK_INSTR_INTERCEPT(a_pVCpu, a_Intercept, a_uExitCode, \
    2827                                        a_uExitInfo1, a_uExitInfo2, a_cbInstr)               do { } while (0)
    2828 # define IEM_SVM_CHECK_READ_CR0_INTERCEPT(a_pVCpu, a_uExitInfo1, a_uExitInfo2, a_cbInstr)   do { } while (0)
    2829 # define IEM_SVM_UPDATE_NRIP(a_pVCpu, a_cbInstr)                                            do { } while (0)
    2830 
    2831 #endif
    2832 
    2833 /** @} */
    2834 
    2835 
    2836 /**
    2837  * Selector descriptor table entry as fetched by iemMemFetchSelDesc.
    2838  */
    2839 typedef union IEMSELDESC
    2840 {
    2841     /** The legacy view. */
    2842     X86DESC     Legacy;
    2843     /** The long mode view. */
    2844     X86DESC64   Long;
    2845 } IEMSELDESC;
    2846 /** Pointer to a selector descriptor table entry. */
    2847 typedef IEMSELDESC *PIEMSELDESC;
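
A small sketch of inspecting a fetched entry (field names per X86DESC; the helper itself is hypothetical):

    static bool iemSketchIsSystemDesc(IEMSELDESC const *pDesc)
    {
        return !pDesc->Legacy.Gen.u1DescType; /* 0 = system, 1 = code or data */
    }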
      50 /** @} */
      51 
    2848 52 
    2849 53 /** @name  Raising Exceptions
    2850 54  * @{ */
    2851 VBOXSTRICTRC            iemTaskSwitch(PVMCPUCC pVCpu, IEMTASKSWITCH enmTaskSwitch, uint32_t uNextEip, uint32_t fFlags,
    2852                                       uint16_t uErr, uint64_t uCr2, RTSEL SelTSS, PIEMSELDESC pNewDescTSS) RT_NOEXCEPT;
    2853 
    2854 VBOXSTRICTRC            iemRaiseXcptOrInt(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector, uint32_t fFlags,
    2855                                           uint16_t uErr, uint64_t uCr2) RT_NOEXCEPT;
    2856 DECL_NO_RETURN(void)    iemRaiseXcptOrIntJmp(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector,
    2857                                              uint32_t fFlags, uint16_t uErr, uint64_t uCr2) IEM_NOEXCEPT_MAY_LONGJMP;
    2858 VBOXSTRICTRC            iemRaiseDivideError(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2859 DECL_NO_RETURN(void)    iemRaiseDivideErrorJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    2860 VBOXSTRICTRC            iemRaiseDebugException(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2861 VBOXSTRICTRC            iemRaiseBoundRangeExceeded(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2862 VBOXSTRICTRC            iemRaiseUndefinedOpcode(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2863 DECL_NO_RETURN(void)    iemRaiseUndefinedOpcodeJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    2864 VBOXSTRICTRC            iemRaiseDeviceNotAvailable(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2865 DECL_NO_RETURN(void)    iemRaiseDeviceNotAvailableJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    2866 VBOXSTRICTRC            iemRaiseTaskSwitchFaultWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
    2867 VBOXSTRICTRC            iemRaiseTaskSwitchFaultCurrentTSS(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2868 VBOXSTRICTRC            iemRaiseTaskSwitchFault0(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2869 VBOXSTRICTRC            iemRaiseTaskSwitchFaultBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
    2870 /*VBOXSTRICTRC            iemRaiseSelectorNotPresent(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;*/
    2871 VBOXSTRICTRC            iemRaiseSelectorNotPresentWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
    2872 VBOXSTRICTRC            iemRaiseSelectorNotPresentBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
    2873 VBOXSTRICTRC            iemRaiseStackSelectorNotPresentBySelector(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
    2874 VBOXSTRICTRC            iemRaiseStackSelectorNotPresentWithErr(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
    2875 VBOXSTRICTRC            iemRaiseGeneralProtectionFault(PVMCPUCC pVCpu, uint16_t uErr) RT_NOEXCEPT;
    2876 VBOXSTRICTRC            iemRaiseGeneralProtectionFault0(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2877 DECL_NO_RETURN(void)    iemRaiseGeneralProtectionFault0Jmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    2878 VBOXSTRICTRC            iemRaiseGeneralProtectionFaultBySelector(PVMCPUCC pVCpu, RTSEL Sel) RT_NOEXCEPT;
    2879 VBOXSTRICTRC            iemRaiseNotCanonical(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2880 VBOXSTRICTRC            iemRaiseSelectorBounds(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;
    2881 DECL_NO_RETURN(void)    iemRaiseSelectorBoundsJmp(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) IEM_NOEXCEPT_MAY_LONGJMP;
    2882 VBOXSTRICTRC            iemRaiseSelectorBoundsBySelector(PVMCPUCC pVCpu, RTSEL Sel) RT_NOEXCEPT;
    2883 DECL_NO_RETURN(void)    iemRaiseSelectorBoundsBySelectorJmp(PVMCPUCC pVCpu, RTSEL Sel) IEM_NOEXCEPT_MAY_LONGJMP;
    2884 VBOXSTRICTRC            iemRaiseSelectorInvalidAccess(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) RT_NOEXCEPT;
    2885 DECL_NO_RETURN(void)    iemRaiseSelectorInvalidAccessJmp(PVMCPUCC pVCpu, uint32_t iSegReg, uint32_t fAccess) IEM_NOEXCEPT_MAY_LONGJMP;
    2886 VBOXSTRICTRC            iemRaisePageFault(PVMCPUCC pVCpu, RTGCPTR GCPtrWhere, uint32_t cbAccess, uint32_t fAccess, int rc) RT_NOEXCEPT;
    2887 DECL_NO_RETURN(void)    iemRaisePageFaultJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrWhere, uint32_t cbAccess, uint32_t fAccess, int rc) IEM_NOEXCEPT_MAY_LONGJMP;
    2888 VBOXSTRICTRC            iemRaiseMathFault(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2889 DECL_NO_RETURN(void)    iemRaiseMathFaultJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    2890 VBOXSTRICTRC            iemRaiseAlignmentCheckException(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2891 DECL_NO_RETURN(void)    iemRaiseAlignmentCheckExceptionJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    2892 VBOXSTRICTRC            iemRaiseSimdFpException(PVMCPUCC pVCpu) RT_NOEXCEPT;
    2893 DECL_NO_RETURN(void)    iemRaiseSimdFpExceptionJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    2894 
    2895 void                    iemLogSyscallRealModeInt(PVMCPUCC pVCpu, uint8_t u8Vector, uint8_t cbInstr);
    2896 void                    iemLogSyscallProtModeInt(PVMCPUCC pVCpu, uint8_t u8Vector, uint8_t cbInstr);
    2897 
    2898 IEM_CIMPL_DEF_0(iemCImplRaiseDivideError);
    2899 IEM_CIMPL_DEF_0(iemCImplRaiseInvalidLockPrefix);
    2900 IEM_CIMPL_DEF_0(iemCImplRaiseInvalidOpcode);
     55//VBOXSTRICTRC            iemRaiseXcptOrInt(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector, uint32_t fFlags,
     56//                                          uint16_t uErr, uint64_t uCr2) RT_NOEXCEPT;
     57//DECL_NO_RETURN(void)    iemRaiseXcptOrIntJmp(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector,
     58//                                             uint32_t fFlags, uint16_t uErr, uint64_t uCr2) IEM_NOEXCEPT_MAY_LONGJMP;
     59
     60#define IEM_RAISE_PROTOS(a_Name, ...) \
     61    VBOXSTRICTRC a_Name(__VA_ARGS__) RT_NOEXCEPT; \
     62    DECL_NO_RETURN(void) RT_CONCAT(a_Name,Jmp)(__VA_ARGS__) IEM_NOEXCEPT_MAY_LONGJMP
     63
     64IEM_RAISE_PROTOS(iemRaiseDataAbortFromWalk,
     65                 PVMCPUCC pVCpu, RTGCPTR GCPtrMem, uint32_t cbMem, uint32_t fAccess, int rc, PCPGMPTWALKFAST pWalkFast);
     66
     67IEM_RAISE_PROTOS(iemRaiseDebugDataAccessOrInvokeDbgf,
     68                 PVMCPUCC pVCpu, uint32_t fDataBps, RTGCPTR GCPtrMem, size_t cbMem, uint32_t fAccess);
     69
     70
     71IEM_CIMPL_PROTO_0(iemCImplRaiseInvalidOpcode);
    290172
    290273/**
    2903  * Macro for calling iemCImplRaiseDivideError().
    2904  *
    2905  * This is for things that will _always_ decode to an \#DE, taking the
    2906  * recompiler into consideration and everything.
    2907  *
    2908  * @return  Strict VBox status code.
    2909  */
    2910 #define IEMOP_RAISE_DIVIDE_ERROR_RET()          IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseDivideError)
    2911 
    2912 /**
    2913  * Macro for calling iemCImplRaiseInvalidLockPrefix().
     74 * Macro for calling iemCImplRaiseInvalidOpcode() for decode/static \#UDs.
    291475 *
     291576 * This is for things that will _always_ decode to an \#UD, taking the
     291677 * recompiler into consideration and everything.
     291778 *
     291879 * @return  Strict VBox status code.
    291980 */
    2920 #define IEMOP_RAISE_INVALID_LOCK_PREFIX_RET()   IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidLockPrefix)
    2921 
    2922 /**
    2923  * Macro for calling iemCImplRaiseInvalidOpcode() for decode/static \#UDs.
    2924  *
    2925  * This is for things that will _always_ decode to an \#UD, taking the
    2926  * recompiler into consideration and everything.
    2927  *
    2928  * @return  Strict VBox status code.
    2929  */
    293081#define IEMOP_RAISE_INVALID_OPCODE_RET()        IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidOpcode)
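A typical consumer is a decoder function that unconditionally rejects an encoding. A minimal sketch, assuming the FNIEMOP_DEF decoder-signature macro from the x86 target carries over unchanged (the function name is hypothetical):

    /* Hypothetical decoder stub that always raises #UD via the deferred CImpl path. */
    FNIEMOP_DEF(iemOp_InvalidEncodingExample)
    {
        IEMOP_RAISE_INVALID_OPCODE_RET();
    }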
    293182
    2932 /**
    2933  * Macro for calling iemCImplRaiseInvalidOpcode() for runtime-style \#UDs.
    2934  *
    2935  * Using this macro means you've got _buggy_ _code_ and are doing things that
     2936  * belong exclusively in IEMAllCImpl.cpp during decoding.
    2937  *
    2938  * @return  Strict VBox status code.
    2939  * @see     IEMOP_RAISE_INVALID_OPCODE_RET
    2940  */
    2941 #define IEMOP_RAISE_INVALID_OPCODE_RUNTIME_RET() IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_XCPT, 0, iemCImplRaiseInvalidOpcode)
    2942 
    2943 /** @} */
     83/** @} */
     84
    294485
    294586/** @name Register Access.
     
    295293/** @} */
    295394
    2954 /** @name FPU access and helpers.
    2955  * @{ */
    2956 void            iemFpuPushResult(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2957 void            iemFpuPushResultWithMemOp(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2958 void            iemFpuPushResultTwo(PVMCPUCC pVCpu, PIEMFPURESULTTWO pResult, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2959 void            iemFpuStoreResult(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2960 void            iemFpuStoreResultThenPop(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2961 void            iemFpuStoreResultWithMemOp(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg,
    2962                                            uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2963 void            iemFpuStoreResultWithMemOpThenPop(PVMCPUCC pVCpu, PIEMFPURESULT pResult, uint8_t iStReg,
    2964                                                   uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2965 void            iemFpuUpdateOpcodeAndIp(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2966 void            iemFpuUpdateFSW(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2967 void            iemFpuUpdateFSWThenPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2968 void            iemFpuUpdateFSWWithMemOp(PVMCPUCC pVCpu, uint16_t u16FSW, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2969 void            iemFpuUpdateFSWThenPopPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2970 void            iemFpuUpdateFSWWithMemOpThenPop(PVMCPUCC pVCpu, uint16_t u16FSW, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2971 void            iemFpuStackUnderflow(PVMCPUCC pVCpu, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2972 void            iemFpuStackUnderflowWithMemOp(PVMCPUCC pVCpu, uint8_t iStReg, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2973 void            iemFpuStackUnderflowThenPop(PVMCPUCC pVCpu, uint8_t iStReg, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2974 void            iemFpuStackUnderflowWithMemOpThenPop(PVMCPUCC pVCpu, uint8_t iStReg, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2975 void            iemFpuStackUnderflowThenPopPop(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2976 void            iemFpuStackPushUnderflow(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2977 void            iemFpuStackPushUnderflowTwo(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2978 void            iemFpuStackPushOverflow(PVMCPUCC pVCpu, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2979 void            iemFpuStackPushOverflowWithMemOp(PVMCPUCC pVCpu, uint8_t iEffSeg, RTGCPTR GCPtrEff, uint16_t uFpuOpcode) RT_NOEXCEPT;
    2980 /** @} */
    2981 
    2982 /** @name SSE+AVX SIMD access and helpers.
    2983  * @{ */
    2984 void            iemSseUpdateMxcsr(PVMCPUCC pVCpu, uint32_t fMxcsr) RT_NOEXCEPT;
    2985 /** @} */
    298695
    298796/** @name   Memory access.
    298897 * @{ */
    298998
    2990 /** Report a \#GP instead of \#AC and do not restrict to ring-3 */
    2991 #define IEM_MEMMAP_F_ALIGN_GP       RT_BIT_32(16)
    2992 /** SSE access that should report a \#GP instead of \#AC, unless MXCSR.MM=1
    2993  *  when it works like normal \#AC. Always used with IEM_MEMMAP_F_ALIGN_GP. */
    2994 #define IEM_MEMMAP_F_ALIGN_SSE      RT_BIT_32(17)
    2995 /** If \#AC is applicable, raise it. Always used with IEM_MEMMAP_F_ALIGN_GP.
    2996  * Users include FXSAVE & FXRSTOR. */
    2997 #define IEM_MEMMAP_F_ALIGN_GP_OR_AC RT_BIT_32(18)
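On the x86 target these flag bits are OR'ed into the uAlignCtl argument of iemMemMap() above the low alignment-mask bits; e.g. FXSAVE-style 16-byte alignment that prefers \#GP (a sketch of that convention, not a quote from the sources):

    /* Alignment control: low bits = alignment mask, high bits = policy flags (x86-target convention). */
    uint32_t const uAlignCtl = 15 /* 16-byte aligned */ | IEM_MEMMAP_F_ALIGN_GP | IEM_MEMMAP_F_ALIGN_GP_OR_AC;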
    2998 
    2999 VBOXSTRICTRC    iemMemMap(PVMCPUCC pVCpu, void **ppvMem, uint8_t *pbUnmapInfo, size_t cbMem, uint8_t iSegReg, RTGCPTR GCPtrMem,
     99/** XXX */
     100#define IEM_MEMMAP_F_ALIGN_XXX      RT_BIT_32(16)
     101
     102VBOXSTRICTRC    iemMemMap(PVMCPUCC pVCpu, void **ppvMem, uint8_t *pbUnmapInfo, size_t cbMem, RTGCPTR GCPtrMem,
    3000103                          uint32_t fAccess, uint32_t uAlignCtl) RT_NOEXCEPT;
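With the iSegReg parameter dropped for the flat ARMv8 address space, a mapped store would look roughly as follows -- a minimal sketch, assuming IEM_ACCESS_DATA_W and an iemMemCommitAndUnmap(pVCpu, bUnmapInfo) counterpart keep their x86-target shapes, with GCPtrMem and u64Value standing in for caller state:

    uint64_t    *pu64Dst    = NULL;
    uint8_t      bUnmapInfo = 0;
    VBOXSTRICTRC rcStrict   = iemMemMap(pVCpu, (void **)&pu64Dst, &bUnmapInfo, sizeof(*pu64Dst),
                                        GCPtrMem, IEM_ACCESS_DATA_W, sizeof(*pu64Dst) - 1 /*uAlignCtl*/);
    if (rcStrict == VINF_SUCCESS)
    {
        *pu64Dst = u64Value;                                /* write through the mapping */
        rcStrict = iemMemCommitAndUnmap(pVCpu, bUnmapInfo); /* commit + release */
    }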
    3001 #ifndef IN_RING3
    3002 VBOXSTRICTRC    iemMemCommitAndUnmapPostponeTroubleToR3(PVMCPUCC pVCpu, uint8_t bUnmapInfo) RT_NOEXCEPT;
    3003 #endif
    3004 VBOXSTRICTRC    iemMemApplySegment(PVMCPUCC pVCpu, uint32_t fAccess, uint8_t iSegReg, size_t cbMem, PRTGCPTR pGCPtrMem) RT_NOEXCEPT;
    3005 VBOXSTRICTRC    iemMemMarkSelDescAccessed(PVMCPUCC pVCpu, uint16_t uSel) RT_NOEXCEPT;
    3006 
    3007 void            iemOpcodeFlushLight(PVMCPUCC pVCpu, uint8_t cbInstr);
    3008 void            iemOpcodeFlushHeavy(PVMCPUCC pVCpu, uint8_t cbInstr);
     104
    3009105#ifdef IEM_WITH_CODE_TLB
    3010106 void            iemOpcodeFetchBytesJmp(PVMCPUCC pVCpu, size_t cbDst, void *pvDst) IEM_NOEXCEPT_MAY_LONGJMP;
    3011107 #else
    3012108 VBOXSTRICTRC    iemOpcodeFetchMoreBytes(PVMCPUCC pVCpu, size_t cbMin) RT_NOEXCEPT;
    3013109#endif
    3014 uint8_t         iemOpcodeGetNextU8SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    3015110uint16_t        iemOpcodeGetNextU16SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    3016 uint32_t        iemOpcodeGetNextU32SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    3017 uint64_t        iemOpcodeGetNextU64SlowJmp(PVMCPUCC pVCpu) IEM_NOEXCEPT_MAY_LONGJMP;
    3018 
    3019 VBOXSTRICTRC    iemMemFetchDataU8(PVMCPUCC pVCpu, uint8_t *pu8Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3020 VBOXSTRICTRC    iemMemFetchDataU16(PVMCPUCC pVCpu, uint16_t *pu16Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3021 VBOXSTRICTRC    iemMemFetchDataU32(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3022 VBOXSTRICTRC    iemMemFetchDataU32NoAc(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3023 VBOXSTRICTRC    iemMemFetchDataU32_ZX_U64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3024 VBOXSTRICTRC    iemMemFetchDataU64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3025 VBOXSTRICTRC    iemMemFetchDataU64NoAc(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3026 VBOXSTRICTRC    iemMemFetchDataU64AlignedU128(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3027 VBOXSTRICTRC    iemMemFetchDataR80(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     111
     112#if 0 /* rework this later */
     113VBOXSTRICTRC    iemMemFetchDataU8(PVMCPUCC pVCpu, uint8_t *pu8Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     114VBOXSTRICTRC    iemMemFetchDataU16(PVMCPUCC pVCpu, uint16_t *pu16Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     115VBOXSTRICTRC    iemMemFetchDataU32(PVMCPUCC pVCpu, uint32_t *pu32Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     116VBOXSTRICTRC    iemMemFetchDataU32NoAc(PVMCPUCC pVCpu, uint32_t *pu32Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     117VBOXSTRICTRC    iemMemFetchDataU32_ZX_U64(PVMCPUCC pVCpu, uint64_t *pu64Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     118VBOXSTRICTRC    iemMemFetchDataU64(PVMCPUCC pVCpu, uint64_t *pu64Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     119VBOXSTRICTRC    iemMemFetchDataU64NoAc(PVMCPUCC pVCpu, uint64_t *pu64Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     120VBOXSTRICTRC    iemMemFetchDataU64AlignedU128(PVMCPUCC pVCpu, uint64_t *pu64Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     121VBOXSTRICTRC    iemMemFetchDataR80(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3028122VBOXSTRICTRC    iemMemFetchDataD80(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3029123VBOXSTRICTRC    iemMemFetchDataU128(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3030124VBOXSTRICTRC    iemMemFetchDataU128NoAc(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3031 VBOXSTRICTRC    iemMemFetchDataU128AlignedSse(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3032 VBOXSTRICTRC    iemMemFetchDataU256(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3033 VBOXSTRICTRC    iemMemFetchDataU256NoAc(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3034 VBOXSTRICTRC    iemMemFetchDataU256AlignedAvx(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3035 VBOXSTRICTRC    iemMemFetchDataXdtr(PVMCPUCC pVCpu, uint16_t *pcbLimit, PRTGCPTR pGCPtrBase, uint8_t iSegReg,
    3036                                     RTGCPTR GCPtrMem, IEMMODE enmOpSize) RT_NOEXCEPT;
    3037125uint8_t         iemMemFetchDataU8SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3038126uint16_t        iemMemFetchDataU16SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     
    3048136void            iemMemFetchDataU128NoAcSafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3049137void            iemMemFetchDataU128AlignedSseSafeJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3050 void            iemMemFetchDataU256SafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3051 void            iemMemFetchDataU256NoAcSafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3052 void            iemMemFetchDataU256AlignedAvxSafeJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3053 #if 0 /* these are inlined now */
    3054 uint8_t         iemMemFetchDataU8Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3055 uint16_t        iemMemFetchDataU16Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3056 uint32_t        iemMemFetchDataU32Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3057 uint32_t        iemMemFlatFetchDataU32Jmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3058 uint64_t        iemMemFetchDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3059 uint64_t        iemMemFetchDataU64AlignedU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3060 void            iemMemFetchDataR80Jmp(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3061 void            iemMemFetchDataD80Jmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3062 void            iemMemFetchDataU128Jmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3063 void            iemMemFetchDataU128NoAcJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3064 void            iemMemFetchDataU128AlignedSseJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3065 void            iemMemFetchDataU256NoAcJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3066 void            iemMemFetchDataU256AlignedAvxJmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3067 #endif
    3068 void            iemMemFetchDataU256Jmp(PVMCPUCC pVCpu, PRTUINT256U pu256Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3069 
    3070 VBOXSTRICTRC    iemMemFetchSysU8(PVMCPUCC pVCpu, uint8_t *pu8Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3071 VBOXSTRICTRC    iemMemFetchSysU16(PVMCPUCC pVCpu, uint16_t *pu16Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3072 VBOXSTRICTRC    iemMemFetchSysU32(PVMCPUCC pVCpu, uint32_t *pu32Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3073 VBOXSTRICTRC    iemMemFetchSysU64(PVMCPUCC pVCpu, uint64_t *pu64Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3074 VBOXSTRICTRC    iemMemFetchSelDesc(PVMCPUCC pVCpu, PIEMSELDESC pDesc, uint16_t uSel, uint8_t uXcpt) RT_NOEXCEPT;
    3075 VBOXSTRICTRC    iemMemFetchSelDescWithErr(PVMCPUCC pVCpu, PIEMSELDESC pDesc, uint16_t uSel,
    3076                                           uint8_t uXcpt, uint16_t uErrorCode) RT_NOEXCEPT;
     138
     139VBOXSTRICTRC    iemMemFetchSysU8(PVMCPUCC pVCpu, uint8_t *pu8Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     140VBOXSTRICTRC    iemMemFetchSysU16(PVMCPUCC pVCpu, uint16_t *pu16Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     141VBOXSTRICTRC    iemMemFetchSysU32(PVMCPUCC pVCpu, uint32_t *pu32Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     142VBOXSTRICTRC    iemMemFetchSysU64(PVMCPUCC pVCpu, uint64_t *pu64Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
     143VBOXSTRICTRC    iemMemFetchSysU128(PVMCPUCC pVCpu, uint128_t *pu128Dst, RTGCPTR GCPtrMem) RT_NOEXCEPT;
    3077144
    3078145VBOXSTRICTRC    iemMemStoreDataU8(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) RT_NOEXCEPT;
     
    3099166void            iemMemStoreDataR80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTFLOAT80U pr80Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3100167void            iemMemStoreDataD80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTPBCD80U pd80Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3101 #if 0 /* inlined */
    3102 void            iemMemStoreDataU8Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3103 void            iemMemStoreDataU16Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint16_t u16Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3104 void            iemMemStoreDataU32Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint32_t u32Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3105 void            iemMemStoreDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint64_t u64Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3106 void            iemMemStoreDataU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3107 void            iemMemStoreDataNoAcU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3108 void            iemMemStoreDataU256NoAcJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3109 void            iemMemStoreDataU256AlignedAvxJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3110 #endif
    3111168void            iemMemStoreDataU128AlignedSseJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, RTUINT128U u128Value) IEM_NOEXCEPT_MAY_LONGJMP;
    3112169void            iemMemStoreDataU256Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
     
    3189246uint64_t        iemMemFetchStackU64SafeJmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    3190247
     248#endif /* later */
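If the #if 0 block above is reworked and enabled as declared, callers would fetch through the same flat, segment-less shape; e.g. (sketch contingent on those prototypes going live unchanged):

    uint32_t     u32Val   = 0;
    VBOXSTRICTRC rcStrict = iemMemFetchDataU32(pVCpu, &u32Val, GCPtrMem); /* note: no iSegReg argument */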
    3191249/** @} */
    3192250
     
    3195253 * @{ */
    3196254
    3197 /**
    3198  * INT instruction types - iemCImpl_int().
    3199  * @note x86 specific
    3200  */
    3201 typedef enum IEMINT
    3202 {
    3203     /** INT n instruction (opcode 0xcd imm). */
    3204     IEMINT_INTN  = 0,
    3205     /** Single byte INT3 instruction (opcode 0xcc). */
    3206     IEMINT_INT3  = IEM_XCPT_FLAGS_BP_INSTR,
    3207     /** Single byte INTO instruction (opcode 0xce). */
    3208     IEMINT_INTO  = IEM_XCPT_FLAGS_OF_INSTR,
    3209     /** Single byte INT1 (ICEBP) instruction (opcode 0xf1). */
    3210     IEMINT_INT1 = IEM_XCPT_FLAGS_ICEBP_INSTR
    3211 } IEMINT;
    3212 AssertCompileSize(IEMINT, 4);
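The enum values double as IEM_XCPT_FLAGS_* bits, which is why iemCImpl_int() can OR its IEMINT argument straight into the exception-flags word; schematically (a sketch of the idea, not the actual function body):

    /* Inside a hypothetical iemCImpl_int() body: enmInt is 0 for INT n, else the matching flag bit. */
    uint32_t const fXcptFlags = IEM_XCPT_FLAGS_T_SOFT_INT | (uint32_t)enmInt;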
    3213 
    3214 IEM_CIMPL_PROTO_2(iemCImpl_pop_mem16, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
    3215 IEM_CIMPL_PROTO_2(iemCImpl_pop_mem32, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
    3216 IEM_CIMPL_PROTO_2(iemCImpl_pop_mem64, uint16_t, iEffSeg, RTGCPTR, GCPtrEffDst);
    3217 IEM_CIMPL_PROTO_0(iemCImpl_popa_16);
    3218 IEM_CIMPL_PROTO_0(iemCImpl_popa_32);
    3219 IEM_CIMPL_PROTO_0(iemCImpl_pusha_16);
    3220 IEM_CIMPL_PROTO_0(iemCImpl_pusha_32);
    3221 IEM_CIMPL_PROTO_1(iemCImpl_pushf, IEMMODE, enmEffOpSize);
    3222 IEM_CIMPL_PROTO_1(iemCImpl_popf, IEMMODE, enmEffOpSize);
    3223 IEM_CIMPL_PROTO_3(iemCImpl_FarJmp, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
    3224 IEM_CIMPL_PROTO_3(iemCImpl_callf, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
    3225 typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmEffOpSize);
    3226 typedef FNIEMCIMPLFARBRANCH *PFNIEMCIMPLFARBRANCH;
    3227 IEM_CIMPL_PROTO_2(iemCImpl_retf, IEMMODE, enmEffOpSize, uint16_t, cbPop);
    3228 IEM_CIMPL_PROTO_3(iemCImpl_enter, IEMMODE, enmEffOpSize, uint16_t, cbFrame, uint8_t, cParameters);
    3229 IEM_CIMPL_PROTO_1(iemCImpl_leave, IEMMODE, enmEffOpSize);
    3230 IEM_CIMPL_PROTO_2(iemCImpl_int, uint8_t, u8Int, IEMINT, enmInt);
    3231 IEM_CIMPL_PROTO_1(iemCImpl_iret_real_v8086, IEMMODE, enmEffOpSize);
    3232 IEM_CIMPL_PROTO_4(iemCImpl_iret_prot_v8086, uint32_t, uNewEip, uint16_t, uNewCs, uint32_t, uNewFlags, uint64_t, uNewRsp);
    3233 IEM_CIMPL_PROTO_1(iemCImpl_iret_prot_NestedTask, IEMMODE, enmEffOpSize);
    3234 IEM_CIMPL_PROTO_1(iemCImpl_iret_prot, IEMMODE, enmEffOpSize);
    3235 IEM_CIMPL_PROTO_1(iemCImpl_iret_64bit, IEMMODE, enmEffOpSize);
    3236 IEM_CIMPL_PROTO_1(iemCImpl_iret, IEMMODE, enmEffOpSize);
    3237 IEM_CIMPL_PROTO_0(iemCImpl_loadall286);
    3238 IEM_CIMPL_PROTO_0(iemCImpl_syscall);
    3239 IEM_CIMPL_PROTO_1(iemCImpl_sysret, IEMMODE, enmEffOpSize);
    3240 IEM_CIMPL_PROTO_0(iemCImpl_sysenter);
    3241 IEM_CIMPL_PROTO_1(iemCImpl_sysexit, IEMMODE, enmEffOpSize);
    3242 IEM_CIMPL_PROTO_2(iemCImpl_LoadSReg, uint8_t, iSegReg, uint16_t, uSel);
    3243 IEM_CIMPL_PROTO_2(iemCImpl_load_SReg, uint8_t, iSegReg, uint16_t, uSel);
    3244 IEM_CIMPL_PROTO_2(iemCImpl_pop_Sreg, uint8_t, iSegReg, IEMMODE, enmEffOpSize);
    3245 IEM_CIMPL_PROTO_5(iemCImpl_load_SReg_Greg, uint16_t, uSel, uint64_t, offSeg, uint8_t, iSegReg, uint8_t, iGReg, IEMMODE, enmEffOpSize);
    3246 IEM_CIMPL_PROTO_2(iemCImpl_VerX, uint16_t, uSel, bool, fWrite);
    3247 IEM_CIMPL_PROTO_3(iemCImpl_LarLsl_u64, uint64_t *, pu64Dst, uint16_t, uSel, bool, fIsLar);
    3248 IEM_CIMPL_PROTO_3(iemCImpl_LarLsl_u16, uint16_t *, pu16Dst, uint16_t, uSel, bool, fIsLar);
    3249 IEM_CIMPL_PROTO_3(iemCImpl_lgdt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc, IEMMODE, enmEffOpSize);
    3250 IEM_CIMPL_PROTO_2(iemCImpl_sgdt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
    3251 IEM_CIMPL_PROTO_3(iemCImpl_lidt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc, IEMMODE, enmEffOpSize);
    3252 IEM_CIMPL_PROTO_2(iemCImpl_sidt, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
    3253 IEM_CIMPL_PROTO_1(iemCImpl_lldt, uint16_t, uNewLdt);
    3254 IEM_CIMPL_PROTO_2(iemCImpl_sldt_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
    3255 IEM_CIMPL_PROTO_2(iemCImpl_sldt_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
    3256 IEM_CIMPL_PROTO_1(iemCImpl_ltr, uint16_t, uNewTr);
    3257 IEM_CIMPL_PROTO_2(iemCImpl_str_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
    3258 IEM_CIMPL_PROTO_2(iemCImpl_str_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
    3259 IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Cd, uint8_t, iGReg, uint8_t, iCrReg);
    3260 IEM_CIMPL_PROTO_2(iemCImpl_smsw_reg, uint8_t, iGReg, uint8_t, enmEffOpSize);
    3261 IEM_CIMPL_PROTO_2(iemCImpl_smsw_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
    3262 IEM_CIMPL_PROTO_4(iemCImpl_load_CrX, uint8_t, iCrReg, uint64_t, uNewCrX, IEMACCESSCRX, enmAccessCrX, uint8_t, iGReg);
    3263 IEM_CIMPL_PROTO_2(iemCImpl_mov_Cd_Rd, uint8_t, iCrReg, uint8_t, iGReg);
    3264 IEM_CIMPL_PROTO_2(iemCImpl_lmsw, uint16_t, u16NewMsw, RTGCPTR, GCPtrEffDst);
    3265 IEM_CIMPL_PROTO_0(iemCImpl_clts);
    3266 IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Dd, uint8_t, iGReg, uint8_t, iDrReg);
    3267 IEM_CIMPL_PROTO_2(iemCImpl_mov_Dd_Rd, uint8_t, iDrReg, uint8_t, iGReg);
    3268 IEM_CIMPL_PROTO_2(iemCImpl_mov_Rd_Td, uint8_t, iGReg, uint8_t, iTrReg);
    3269 IEM_CIMPL_PROTO_2(iemCImpl_mov_Td_Rd, uint8_t, iTrReg, uint8_t, iGReg);
    3270 IEM_CIMPL_PROTO_1(iemCImpl_invlpg, RTGCPTR, GCPtrPage);
    3271 IEM_CIMPL_PROTO_3(iemCImpl_invpcid, uint8_t, iEffSeg, RTGCPTR, GCPtrInvpcidDesc, uint64_t, uInvpcidType);
    3272 IEM_CIMPL_PROTO_0(iemCImpl_invd);
    3273 IEM_CIMPL_PROTO_0(iemCImpl_wbinvd);
    3274 IEM_CIMPL_PROTO_0(iemCImpl_rsm);
    3275 IEM_CIMPL_PROTO_0(iemCImpl_rdtsc);
    3276 IEM_CIMPL_PROTO_0(iemCImpl_rdtscp);
    3277 IEM_CIMPL_PROTO_0(iemCImpl_rdpmc);
    3278 IEM_CIMPL_PROTO_0(iemCImpl_rdmsr);
    3279 IEM_CIMPL_PROTO_0(iemCImpl_wrmsr);
    3280 IEM_CIMPL_PROTO_3(iemCImpl_in, uint16_t, u16Port, uint8_t, cbReg, uint8_t, bImmAndEffAddrMode);
    3281 IEM_CIMPL_PROTO_2(iemCImpl_in_eAX_DX, uint8_t, cbReg, IEMMODE, enmEffAddrMode);
    3282 IEM_CIMPL_PROTO_3(iemCImpl_out, uint16_t, u16Port, uint8_t, cbReg, uint8_t, bImmAndEffAddrMode);
    3283 IEM_CIMPL_PROTO_2(iemCImpl_out_DX_eAX, uint8_t, cbReg, IEMMODE, enmEffAddrMode);
    3284 IEM_CIMPL_PROTO_0(iemCImpl_cli);
    3285 IEM_CIMPL_PROTO_0(iemCImpl_sti);
    3286 IEM_CIMPL_PROTO_0(iemCImpl_hlt);
    3287 IEM_CIMPL_PROTO_1(iemCImpl_monitor, uint8_t, iEffSeg);
    3288 IEM_CIMPL_PROTO_0(iemCImpl_mwait);
    3289 IEM_CIMPL_PROTO_0(iemCImpl_swapgs);
    3290 IEM_CIMPL_PROTO_0(iemCImpl_cpuid);
    3291 IEM_CIMPL_PROTO_1(iemCImpl_aad, uint8_t, bImm);
    3292 IEM_CIMPL_PROTO_1(iemCImpl_aam, uint8_t, bImm);
    3293 IEM_CIMPL_PROTO_0(iemCImpl_daa);
    3294 IEM_CIMPL_PROTO_0(iemCImpl_das);
    3295 IEM_CIMPL_PROTO_0(iemCImpl_aaa);
    3296 IEM_CIMPL_PROTO_0(iemCImpl_aas);
    3297 IEM_CIMPL_PROTO_3(iemCImpl_bound_16, int16_t, idxArray, int16_t, idxLowerBound, int16_t, idxUpperBound);
    3298 IEM_CIMPL_PROTO_3(iemCImpl_bound_32, int32_t, idxArray, int32_t, idxLowerBound, int32_t, idxUpperBound);
    3299 IEM_CIMPL_PROTO_0(iemCImpl_xgetbv);
    3300 IEM_CIMPL_PROTO_0(iemCImpl_xsetbv);
    3301 IEM_CIMPL_PROTO_5(iemCImpl_cmpxchg16b_fallback_rendezvous, PRTUINT128U, pu128Dst, PRTUINT128U, pu128RaxRdx,
    3302                   PRTUINT128U, pu128RbxRcx, uint32_t *, pEFlags, uint8_t, bUnmapInfo);
    3303 IEM_CIMPL_PROTO_2(iemCImpl_clflush_clflushopt, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
    3304 IEM_CIMPL_PROTO_1(iemCImpl_finit, bool, fCheckXcpts);
    3305 IEM_CIMPL_PROTO_3(iemCImpl_fxsave, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
    3306 IEM_CIMPL_PROTO_3(iemCImpl_fxrstor, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
    3307 IEM_CIMPL_PROTO_3(iemCImpl_xsave, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
    3308 IEM_CIMPL_PROTO_3(iemCImpl_xrstor, uint8_t, iEffSeg, RTGCPTR, GCPtrEff, IEMMODE, enmEffOpSize);
    3309 IEM_CIMPL_PROTO_2(iemCImpl_stmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
    3310 IEM_CIMPL_PROTO_2(iemCImpl_vstmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
    3311 IEM_CIMPL_PROTO_2(iemCImpl_ldmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
    3312 IEM_CIMPL_PROTO_2(iemCImpl_vldmxcsr, uint8_t, iEffSeg, RTGCPTR, GCPtrEff);
    3313 IEM_CIMPL_PROTO_3(iemCImpl_fnstenv, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
    3314 IEM_CIMPL_PROTO_3(iemCImpl_fnsave, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst);
    3315 IEM_CIMPL_PROTO_3(iemCImpl_fldenv, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
    3316 IEM_CIMPL_PROTO_3(iemCImpl_frstor, IEMMODE, enmEffOpSize, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
    3317 IEM_CIMPL_PROTO_1(iemCImpl_fldcw, uint16_t, u16Fcw);
    3318 IEM_CIMPL_PROTO_2(iemCImpl_fxch_underflow, uint8_t, iStReg, uint16_t, uFpuOpcode);
    3319 IEM_CIMPL_PROTO_3(iemCImpl_fcomi_fucomi, uint8_t, iStReg, bool, fUCmp, uint32_t, uPopAndFpuOpcode);
    3320 IEM_CIMPL_PROTO_2(iemCImpl_rdseed, uint8_t, iReg, IEMMODE, enmEffOpSize);
    3321 IEM_CIMPL_PROTO_2(iemCImpl_rdrand, uint8_t, iReg, IEMMODE, enmEffOpSize);
    3322 IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
    3323 IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
    3324 IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
    3325 IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovps_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
    3326 IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
    3327 IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
    3328 IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
    3329 IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
    3330 IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
    3331 IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
    3332 IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
    3333 IEM_CIMPL_PROTO_4(iemCImpl_vmaskmovpd_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
    3334 IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_load_u128, uint8_t, iXRegDst, uint8_t, iXRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
    3335 IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_load_u256, uint8_t, iYRegDst, uint8_t, iYRegMsk, uint8_t, iEffSeg, RTGCPTR, GCPtrEffSrc);
    3336 IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_store_u128, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iXRegMsk, uint8_t, iXRegSrc);
    3337 IEM_CIMPL_PROTO_4(iemCImpl_vpmaskmovq_store_u256, uint8_t, iEffSeg, RTGCPTR, GCPtrEffDst, uint8_t, iYRegMsk, uint8_t, iYRegSrc);
    3338 IEM_CIMPL_PROTO_2(iemCImpl_vpgather_worker_xx, uint32_t, u32PackedArgs, uint32_t, u32Disp);
    3339 
    3340 /** Packed 32-bit argument for iemCImpl_vpgather_worker_xx. */
    3341 typedef union IEMGATHERARGS
    3342 {
    3343     /** Integer view. */
    3344     uint32_t u;
    3345     /** Bitfield view. */
    3346     struct
    3347     {
    3348         uint32_t iYRegDst       : 4; /**<  0 - XMM or YMM register number (destination) */
    3349         uint32_t iYRegIdc       : 4; /**<  4 - XMM or YMM register number (indices)     */
    3350         uint32_t iYRegMsk       : 4; /**<  8 - XMM or YMM register number (mask)        */
    3351         uint32_t iGRegBase      : 4; /**< 12 - general register number    (base ptr)    */
    3352         uint32_t iScale         : 2; /**< 16 - scale factor               (1/2/4/8)     */
    3353         uint32_t enmEffOpSize   : 2; /**< 18 - operand size               (16/32/64/--) */
    3354         uint32_t enmEffAddrMode : 2; /**< 20 - addressing  mode           (16/32/64/--) */
    3355         uint32_t iEffSeg        : 3; /**< 22 - effective segment (ES/CS/SS/DS/FS/GS)    */
    3356         uint32_t fVex256        : 1; /**< 25 - overall instruction width (128/256 bits) */
    3357         uint32_t fIdxQword      : 1; /**< 26 - individual index width     (4/8 bytes)   */
    3358         uint32_t fValQword      : 1; /**< 27 - individual value width     (4/8 bytes)   */
    3359     } s;
    3360 } IEMGATHERARGS;
    3361 AssertCompileSize(IEMGATHERARGS, sizeof(uint32_t));
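The integer and bitfield views alias, so the caller packs once and the two-argument worker unpacks on the far side; an illustrative round trip (all register numbers made up):

    IEMGATHERARGS Args;
    Args.u           = 0;
    Args.s.iYRegDst  = 1;   /* destination register */
    Args.s.iYRegIdc  = 3;   /* index register       */
    Args.s.iYRegMsk  = 2;   /* mask register        */
    Args.s.iGRegBase = 5;   /* base GPR             */
    Args.s.fVex256   = 1;   /* 256-bit operation    */
    uint32_t const u32PackedArgs = Args.u;  /* first argument to iemCImpl_vpgather_worker_xx */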
    3362 
    3363 /** @} */
    3364 
    3365 /** @name IEMAllCImplStrInstr.cpp.h
    3366  * @note sed -e '/IEM_CIMPL_DEF_/!d' -e 's/IEM_CIMPL_DEF_/IEM_CIMPL_PROTO_/' -e 's/$/;/' -e 's/RT_CONCAT4(//' \
    3367  *           -e 's/,ADDR_SIZE)/64/g' -e 's/,OP_SIZE,/64/g' -e 's/,OP_rAX,/rax/g' IEMAllCImplStrInstr.cpp.h
    3368  * @{ */
    3369 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr16, uint8_t, iEffSeg);
    3370 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr16, uint8_t, iEffSeg);
    3371 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m16);
    3372 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m16);
    3373 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr16, uint8_t, iEffSeg);
    3374 IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m16);
    3375 IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m16, int8_t, iEffSeg);
    3376 IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr16, bool, fIoChecked);
    3377 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr16, bool, fIoChecked);
    3378 IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr16, uint8_t, iEffSeg, bool, fIoChecked);
    3379 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr16, uint8_t, iEffSeg, bool, fIoChecked);
    3380 
    3381 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr16, uint8_t, iEffSeg);
    3382 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr16, uint8_t, iEffSeg);
    3383 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m16);
    3384 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m16);
    3385 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr16, uint8_t, iEffSeg);
    3386 IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m16);
    3387 IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m16, int8_t, iEffSeg);
    3388 IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr16, bool, fIoChecked);
    3389 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr16, bool, fIoChecked);
    3390 IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr16, uint8_t, iEffSeg, bool, fIoChecked);
    3391 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr16, uint8_t, iEffSeg, bool, fIoChecked);
    3392 
    3393 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr16, uint8_t, iEffSeg);
    3394 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr16, uint8_t, iEffSeg);
    3395 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m16);
    3396 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m16);
    3397 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr16, uint8_t, iEffSeg);
    3398 IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m16);
    3399 IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m16, int8_t, iEffSeg);
    3400 IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr16, bool, fIoChecked);
    3401 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr16, bool, fIoChecked);
    3402 IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr16, uint8_t, iEffSeg, bool, fIoChecked);
    3403 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr16, uint8_t, iEffSeg, bool, fIoChecked);
    3404 
    3405 
    3406 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr32, uint8_t, iEffSeg);
    3407 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr32, uint8_t, iEffSeg);
    3408 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m32);
    3409 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m32);
    3410 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr32, uint8_t, iEffSeg);
    3411 IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m32);
    3412 IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m32, int8_t, iEffSeg);
    3413 IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr32, bool, fIoChecked);
    3414 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr32, bool, fIoChecked);
    3415 IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr32, uint8_t, iEffSeg, bool, fIoChecked);
    3416 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr32, uint8_t, iEffSeg, bool, fIoChecked);
    3417 
    3418 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr32, uint8_t, iEffSeg);
    3419 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr32, uint8_t, iEffSeg);
    3420 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m32);
    3421 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m32);
    3422 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr32, uint8_t, iEffSeg);
    3423 IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m32);
    3424 IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m32, int8_t, iEffSeg);
    3425 IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr32, bool, fIoChecked);
    3426 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr32, bool, fIoChecked);
    3427 IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr32, uint8_t, iEffSeg, bool, fIoChecked);
    3428 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr32, uint8_t, iEffSeg, bool, fIoChecked);
    3429 
    3430 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr32, uint8_t, iEffSeg);
    3431 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr32, uint8_t, iEffSeg);
    3432 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m32);
    3433 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m32);
    3434 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr32, uint8_t, iEffSeg);
    3435 IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m32);
    3436 IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m32, int8_t, iEffSeg);
    3437 IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr32, bool, fIoChecked);
    3438 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr32, bool, fIoChecked);
    3439 IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr32, uint8_t, iEffSeg, bool, fIoChecked);
    3440 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr32, uint8_t, iEffSeg, bool, fIoChecked);
    3441 
    3442 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op64_addr32, uint8_t, iEffSeg);
    3443 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op64_addr32, uint8_t, iEffSeg);
    3444 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_rax_m32);
    3445 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_rax_m32);
    3446 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op64_addr32, uint8_t, iEffSeg);
    3447 IEM_CIMPL_PROTO_0(iemCImpl_stos_rax_m32);
    3448 IEM_CIMPL_PROTO_1(iemCImpl_lods_rax_m32, int8_t, iEffSeg);
    3449 IEM_CIMPL_PROTO_1(iemCImpl_ins_op64_addr32, bool, fIoChecked);
    3450 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op64_addr32, bool, fIoChecked);
    3451 IEM_CIMPL_PROTO_2(iemCImpl_outs_op64_addr32, uint8_t, iEffSeg, bool, fIoChecked);
    3452 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op64_addr32, uint8_t, iEffSeg, bool, fIoChecked);
    3453 
    3454 
    3455 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op8_addr64, uint8_t, iEffSeg);
    3456 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op8_addr64, uint8_t, iEffSeg);
    3457 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_al_m64);
    3458 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_al_m64);
    3459 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op8_addr64, uint8_t, iEffSeg);
    3460 IEM_CIMPL_PROTO_0(iemCImpl_stos_al_m64);
    3461 IEM_CIMPL_PROTO_1(iemCImpl_lods_al_m64, int8_t, iEffSeg);
    3462 IEM_CIMPL_PROTO_1(iemCImpl_ins_op8_addr64, bool, fIoChecked);
    3463 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op8_addr64, bool, fIoChecked);
    3464 IEM_CIMPL_PROTO_2(iemCImpl_outs_op8_addr64, uint8_t, iEffSeg, bool, fIoChecked);
    3465 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op8_addr64, uint8_t, iEffSeg, bool, fIoChecked);
    3466 
    3467 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op16_addr64, uint8_t, iEffSeg);
    3468 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op16_addr64, uint8_t, iEffSeg);
    3469 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_ax_m64);
    3470 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_ax_m64);
    3471 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op16_addr64, uint8_t, iEffSeg);
    3472 IEM_CIMPL_PROTO_0(iemCImpl_stos_ax_m64);
    3473 IEM_CIMPL_PROTO_1(iemCImpl_lods_ax_m64, int8_t, iEffSeg);
    3474 IEM_CIMPL_PROTO_1(iemCImpl_ins_op16_addr64, bool, fIoChecked);
    3475 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op16_addr64, bool, fIoChecked);
    3476 IEM_CIMPL_PROTO_2(iemCImpl_outs_op16_addr64, uint8_t, iEffSeg, bool, fIoChecked);
    3477 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op16_addr64, uint8_t, iEffSeg, bool, fIoChecked);
    3478 
    3479 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op32_addr64, uint8_t, iEffSeg);
    3480 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op32_addr64, uint8_t, iEffSeg);
    3481 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_eax_m64);
    3482 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_eax_m64);
    3483 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op32_addr64, uint8_t, iEffSeg);
    3484 IEM_CIMPL_PROTO_0(iemCImpl_stos_eax_m64);
    3485 IEM_CIMPL_PROTO_1(iemCImpl_lods_eax_m64, int8_t, iEffSeg);
    3486 IEM_CIMPL_PROTO_1(iemCImpl_ins_op32_addr64, bool, fIoChecked);
    3487 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op32_addr64, bool, fIoChecked);
    3488 IEM_CIMPL_PROTO_2(iemCImpl_outs_op32_addr64, uint8_t, iEffSeg, bool, fIoChecked);
    3489 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op32_addr64, uint8_t, iEffSeg, bool, fIoChecked);
    3490 
    3491 IEM_CIMPL_PROTO_1(iemCImpl_repe_cmps_op64_addr64, uint8_t, iEffSeg);
    3492 IEM_CIMPL_PROTO_1(iemCImpl_repne_cmps_op64_addr64, uint8_t, iEffSeg);
    3493 IEM_CIMPL_PROTO_0(iemCImpl_repe_scas_rax_m64);
    3494 IEM_CIMPL_PROTO_0(iemCImpl_repne_scas_rax_m64);
    3495 IEM_CIMPL_PROTO_1(iemCImpl_rep_movs_op64_addr64, uint8_t, iEffSeg);
    3496 IEM_CIMPL_PROTO_0(iemCImpl_stos_rax_m64);
    3497 IEM_CIMPL_PROTO_1(iemCImpl_lods_rax_m64, int8_t, iEffSeg);
    3498 IEM_CIMPL_PROTO_1(iemCImpl_ins_op64_addr64, bool, fIoChecked);
    3499 IEM_CIMPL_PROTO_1(iemCImpl_rep_ins_op64_addr64, bool, fIoChecked);
    3500 IEM_CIMPL_PROTO_2(iemCImpl_outs_op64_addr64, uint8_t, iEffSeg, bool, fIoChecked);
    3501 IEM_CIMPL_PROTO_2(iemCImpl_rep_outs_op64_addr64, uint8_t, iEffSeg, bool, fIoChecked);
    3502 /** @} */
    3503 
    3504 #ifdef VBOX_WITH_NESTED_HWVIRT_VMX
    3505 VBOXSTRICTRC    iemVmxVmexit(PVMCPUCC pVCpu, uint32_t uExitReason, uint64_t u64ExitQual) RT_NOEXCEPT;
    3506 VBOXSTRICTRC    iemVmxVmexitInstr(PVMCPUCC pVCpu, uint32_t uExitReason, uint8_t cbInstr) RT_NOEXCEPT;
    3507 VBOXSTRICTRC    iemVmxVmexitInstrNeedsInfo(PVMCPUCC pVCpu, uint32_t uExitReason, VMXINSTRID uInstrId, uint8_t cbInstr) RT_NOEXCEPT;
    3508 VBOXSTRICTRC    iemVmxVmexitTaskSwitch(PVMCPUCC pVCpu, IEMTASKSWITCH enmTaskSwitch, RTSEL SelNewTss, uint8_t cbInstr) RT_NOEXCEPT;
    3509 VBOXSTRICTRC    iemVmxVmexitEvent(PVMCPUCC pVCpu, uint8_t uVector, uint32_t fFlags, uint32_t uErrCode, uint64_t uCr2, uint8_t cbInstr)  RT_NOEXCEPT;
    3510 VBOXSTRICTRC    iemVmxVmexitEventDoubleFault(PVMCPUCC pVCpu) RT_NOEXCEPT;
    3511 VBOXSTRICTRC    iemVmxVmexitEpt(PVMCPUCC pVCpu, PPGMPTWALKFAST pWalk, uint32_t fAccess, uint32_t fSlatFail, uint8_t cbInstr) RT_NOEXCEPT;
    3512 VBOXSTRICTRC    iemVmxVmexitPreemptTimer(PVMCPUCC pVCpu) RT_NOEXCEPT;
    3513 VBOXSTRICTRC    iemVmxVmexitInstrMwait(PVMCPUCC pVCpu, bool fMonitorHwArmed, uint8_t cbInstr) RT_NOEXCEPT;
    3514 VBOXSTRICTRC    iemVmxVmexitInstrIo(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint16_t u16Port,
    3515                                     bool fImm, uint8_t cbAccess, uint8_t cbInstr) RT_NOEXCEPT;
    3516 VBOXSTRICTRC    iemVmxVmexitInstrStrIo(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint16_t u16Port, uint8_t cbAccess,
    3517                                        bool fRep, VMXEXITINSTRINFO ExitInstrInfo, uint8_t cbInstr) RT_NOEXCEPT;
    3518 VBOXSTRICTRC    iemVmxVmexitInstrMovDrX(PVMCPUCC pVCpu, VMXINSTRID uInstrId, uint8_t iDrReg, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
    3519 VBOXSTRICTRC    iemVmxVmexitInstrMovToCr8(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
    3520 VBOXSTRICTRC    iemVmxVmexitInstrMovFromCr8(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
    3521 VBOXSTRICTRC    iemVmxVmexitInstrMovToCr3(PVMCPUCC pVCpu, uint64_t uNewCr3, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
    3522 VBOXSTRICTRC    iemVmxVmexitInstrMovFromCr3(PVMCPUCC pVCpu, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
    3523 VBOXSTRICTRC    iemVmxVmexitInstrMovToCr0Cr4(PVMCPUCC pVCpu, uint8_t iCrReg, uint64_t *puNewCrX, uint8_t iGReg, uint8_t cbInstr) RT_NOEXCEPT;
    3524 VBOXSTRICTRC    iemVmxVmexitInstrClts(PVMCPUCC pVCpu, uint8_t cbInstr) RT_NOEXCEPT;
    3525 VBOXSTRICTRC    iemVmxVmexitInstrLmsw(PVMCPUCC pVCpu, uint32_t uGuestCr0, uint16_t *pu16NewMsw,
    3526                                       RTGCPTR GCPtrEffDst, uint8_t cbInstr) RT_NOEXCEPT;
    3527 VBOXSTRICTRC    iemVmxVmexitInstrInvlpg(PVMCPUCC pVCpu, RTGCPTR GCPtrPage, uint8_t cbInstr) RT_NOEXCEPT;
    3528 VBOXSTRICTRC    iemVmxApicWriteEmulation(PVMCPUCC pVCpu) RT_NOEXCEPT;
    3529 VBOXSTRICTRC    iemVmxVirtApicAccessUnused(PVMCPUCC pVCpu, PRTGCPHYS pGCPhysAccess, size_t cbAccess, uint32_t fAccess) RT_NOEXCEPT;
    3530 uint32_t        iemVmxVirtApicReadRaw32(PVMCPUCC pVCpu, uint16_t offReg) RT_NOEXCEPT;
    3531 void            iemVmxVirtApicWriteRaw32(PVMCPUCC pVCpu, uint16_t offReg, uint32_t uReg) RT_NOEXCEPT;
    3532 VBOXSTRICTRC    iemVmxInvvpid(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPTR GCPtrInvvpidDesc,
    3533                               uint64_t u64InvvpidType, PCVMXVEXITINFO pExitInfo) RT_NOEXCEPT;
    3534 bool            iemVmxIsRdmsrWrmsrInterceptSet(PCVMCPU pVCpu, uint32_t uExitReason, uint32_t idMsr) RT_NOEXCEPT;
    3535 IEM_CIMPL_PROTO_0(iemCImpl_vmxoff);
    3536 IEM_CIMPL_PROTO_2(iemCImpl_vmxon, uint8_t, iEffSeg, RTGCPTR, GCPtrVmxon);
    3537 IEM_CIMPL_PROTO_0(iemCImpl_vmlaunch);
    3538 IEM_CIMPL_PROTO_0(iemCImpl_vmresume);
    3539 IEM_CIMPL_PROTO_2(iemCImpl_vmptrld, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
    3540 IEM_CIMPL_PROTO_2(iemCImpl_vmptrst, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
    3541 IEM_CIMPL_PROTO_2(iemCImpl_vmclear, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs);
    3542 IEM_CIMPL_PROTO_2(iemCImpl_vmwrite_reg, uint64_t, u64Val, uint64_t, u64VmcsField);
    3543 IEM_CIMPL_PROTO_3(iemCImpl_vmwrite_mem, uint8_t, iEffSeg, RTGCPTR, GCPtrVal, uint32_t, u64VmcsField);
    3544 IEM_CIMPL_PROTO_2(iemCImpl_vmread_reg64, uint64_t *, pu64Dst, uint64_t, u64VmcsField);
    3545 IEM_CIMPL_PROTO_2(iemCImpl_vmread_reg32, uint64_t *, pu32Dst, uint32_t, u32VmcsField);
    3546 IEM_CIMPL_PROTO_3(iemCImpl_vmread_mem_reg64, uint8_t, iEffSeg, RTGCPTR, GCPtrDst, uint32_t, u64VmcsField);
    3547 IEM_CIMPL_PROTO_3(iemCImpl_vmread_mem_reg32, uint8_t, iEffSeg, RTGCPTR, GCPtrDst, uint32_t, u32VmcsField);
    3548 IEM_CIMPL_PROTO_3(iemCImpl_invvpid, uint8_t, iEffSeg, RTGCPTR, GCPtrInvvpidDesc, uint64_t, uInvvpidType);
    3549 IEM_CIMPL_PROTO_3(iemCImpl_invept, uint8_t, iEffSeg, RTGCPTR, GCPtrInveptDesc, uint64_t, uInveptType);
    3550 IEM_CIMPL_PROTO_0(iemCImpl_vmx_pause);
    3551 #endif
    3552 
    3553 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM
    3554 VBOXSTRICTRC    iemSvmVmexit(PVMCPUCC pVCpu, uint64_t uExitCode, uint64_t uExitInfo1, uint64_t uExitInfo2) RT_NOEXCEPT;
    3555 VBOXSTRICTRC    iemHandleSvmEventIntercept(PVMCPUCC pVCpu, uint8_t cbInstr, uint8_t u8Vector, uint32_t fFlags, uint32_t uErr, uint64_t uCr2) RT_NOEXCEPT;
    3556 VBOXSTRICTRC    iemSvmHandleIOIntercept(PVMCPUCC pVCpu, uint16_t u16Port, SVMIOIOTYPE enmIoType, uint8_t cbReg,
    3557                                         uint8_t cAddrSizeBits, uint8_t iEffSeg, bool fRep, bool fStrIo, uint8_t cbInstr) RT_NOEXCEPT;
    3558 VBOXSTRICTRC    iemSvmHandleMsrIntercept(PVMCPUCC pVCpu, uint32_t idMsr, bool fWrite, uint8_t cbInstr) RT_NOEXCEPT;
    3559 IEM_CIMPL_PROTO_0(iemCImpl_vmrun);
    3560 IEM_CIMPL_PROTO_0(iemCImpl_vmload);
    3561 IEM_CIMPL_PROTO_0(iemCImpl_vmsave);
    3562 IEM_CIMPL_PROTO_0(iemCImpl_clgi);
    3563 IEM_CIMPL_PROTO_0(iemCImpl_stgi);
    3564 IEM_CIMPL_PROTO_0(iemCImpl_invlpga);
    3565 IEM_CIMPL_PROTO_0(iemCImpl_skinit);
    3566 IEM_CIMPL_PROTO_0(iemCImpl_svm_pause);
    3567 #endif
    3568 
    3569 IEM_CIMPL_PROTO_0(iemCImpl_vmcall);  /* vmx */
    3570 IEM_CIMPL_PROTO_0(iemCImpl_vmmcall); /* svm */
    3571 IEM_CIMPL_PROTO_1(iemCImpl_Hypercall, uint16_t, uDisOpcode); /* both */
    3572 
    3573 extern const PFNIEMOP g_apfnIemInterpretOnlyOneByteMap[256];
    3574 extern const PFNIEMOP g_apfnIemInterpretOnlyTwoByteMap[1024];
    3575 extern const PFNIEMOP g_apfnIemInterpretOnlyThreeByte0f3a[1024];
    3576 extern const PFNIEMOP g_apfnIemInterpretOnlyThreeByte0f38[1024];
    3577 extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap1[1024];
    3578 extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap2[1024];
    3579 extern const PFNIEMOP g_apfnIemInterpretOnlyVecMap3[1024];
     255/** @} */
    3580256
    3581257/*
    3582258 * Recompiler related stuff.
    3583259 */
    3584 extern const PFNIEMOP g_apfnIemThreadedRecompilerOneByteMap[256];
    3585 extern const PFNIEMOP g_apfnIemThreadedRecompilerTwoByteMap[1024];
    3586 extern const PFNIEMOP g_apfnIemThreadedRecompilerThreeByte0f3a[1024];
    3587 extern const PFNIEMOP g_apfnIemThreadedRecompilerThreeByte0f38[1024];
    3588 extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap1[1024];
    3589 extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap2[1024];
    3590 extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap3[1024];
    3591260
    3592261
     
    3601270IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckMode);
    3602271IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckHwInstrBps);
    3603 IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLim);
    3604 
    3605 IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckCsLimAndOpcodes);
     272
    3606273IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodes);
    3607 IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesConsiderCsLim);
    3608274
    3609275/* Branching: */
     
    3643309
    3644310
    3645 #endif /* !VMM_INCLUDED_SRC_VMMAll_target_x86_IEMInternal_x86_h */
    3646 
     311#endif /* !VMM_INCLUDED_SRC_VMMAll_target_armv8_IEMInternal_armv8_h */
     312
  • trunk/src/VBox/VMM/VMMAll/target-x86/IEMAll-x86.cpp

    r108260 r108791  
    7676# define INVALID_TLB_ENTRY_FOR_BP(a_uValue) do { \
    7777        RTGCPTR uTagNoRev = (a_uValue); \
    78         uTagNoRev = IEMTLB_CALC_TAG_NO_REV(uTagNoRev); \
     78        uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, uTagNoRev); \
    7979        /** @todo do large page accounting */ \
    8080        uintptr_t const idxEven = IEMTLB_TAG_TO_EVEN_INDEX(uTagNoRev); \
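The IEMTLB_CALC_TAG_NO_REV change running through these hunks adds pVCpu to the tag calculation, presumably so the page shift can become per-vCPU state (ARMv8 allows 4K/16K/64K translation granules) instead of a fixed compile-time constant. A hypothetical shape for orientation only; IEMTLB_GET_PAGE_SHIFT is an invented stand-in, not the real definition:

    /* Hypothetical illustration of why the macro now needs pVCpu. */
    #define IEMTLB_CALC_TAG_NO_REV(a_pVCpu, a_GCPtr) \
        ((uint64_t)(a_GCPtr) >> IEMTLB_GET_PAGE_SHIFT(a_pVCpu)) /* invented helper */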
  • trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllMem-x86.cpp

    r108278 r108791  
    430430     */
    431431    uint8_t           *pbMem     = NULL;
    432     uint64_t const     uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     432    uint64_t const     uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    433433    PIEMTLBENTRY       pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    434434    uint64_t const     fTlbeAD   = IEMTLBE_F_PT_NO_ACCESSED | (fAccess & IEM_ACCESS_TYPE_WRITE ? IEMTLBE_F_PT_NO_DIRTY : 0);
     
    574574        if (pTlbe != &pVCpu->iem.s.DataBreakpointTlbe)
    575575        {
    576             if (!((uintptr_t)pTlbe & (sizeof(*pTlbe) * 2 - 1)))
     576            if (!IEMTLBE_IS_GLOBAL(pTlbe))
    577577                IEMTLBTRACE_LOAD(       pVCpu, GCPtrMem, pTlbe->GCPhys, (uint32_t)pTlbe->fFlagsAndPhysRev, true);
    578578            else
     
    823823                                        : 0;
    824824    uint64_t const     fNoRead          = fAccess & IEM_ACCESS_TYPE_READ ? IEMTLBE_F_PG_NO_READ : 0;
    825     uint64_t const     uTagNoRev        = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     825    uint64_t const     uTagNoRev        = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    826826    PIEMTLBENTRY       pTlbe            = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    827827    uint64_t const     fTlbeAD          = IEMTLBE_F_PT_NO_ACCESSED | (fNoWriteNoDirty & IEMTLBE_F_PT_NO_DIRTY);
     
    925925        if (pTlbe != &pVCpu->iem.s.DataBreakpointTlbe)
    926926        {
    927             if (!((uintptr_t)pTlbe & (sizeof(*pTlbe) * 2 - 1)))
     927            if (!IEMTLBE_IS_GLOBAL(pTlbe))
    928928                IEMTLBTRACE_LOAD(       pVCpu, GCPtrMem, pTlbe->GCPhys, (uint32_t)pTlbe->fFlagsAndPhysRev, true);
    929929            else
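The new IEMTLBE_IS_GLOBAL macro replaces the open-coded even/odd pointer test in both hunks above; substituting back the expression it replaces suggests a definition equivalent to the following (an assumption derived from this diff, not the actual header text):

    /* Even entry of each TLB pair = non-global, odd sibling = global mapping. */
    #define IEMTLBE_IS_GLOBAL(a_pTlbe) \
        ( ((uintptr_t)(a_pTlbe) & (sizeof(IEMTLBENTRY) * 2 - 1)) != 0 )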
  • trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllMemRWTmplInline-x86.cpp.h

    r108278 r108791  
    108108         * TLB lookup.
    109109         */
    110         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     110        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    111111        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    112112        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    182182         * TLB lookup.
    183183         */
    184         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     184        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    185185        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    186186        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    260260         * TLB lookup.
    261261         */
    262         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     262        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    263263        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    264264        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    334334         * TLB lookup.
    335335         */
    336         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     336        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    337337        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    338338        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    410410         * TLB lookup.
    411411         */
    412         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     412        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    413413        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    414414        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    470470         * TLB lookup.
    471471         */
    472         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     472        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    473473        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    474474        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    532532         * TLB lookup.
    533533         */
    534         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     534        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    535535        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    536536        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    592592         * TLB lookup.
    593593         */
    594         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     594        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    595595        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    596596        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    652652         * TLB lookup.
    653653         */
    654         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     654        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    655655        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    656656        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    710710         * TLB lookup.
    711711         */
    712         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     712        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    713713        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    714714        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    769769         * TLB lookup.
    770770         */
    771         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     771        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    772772        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    773773        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    826826         * TLB lookup.
    827827         */
    828         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     828        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    829829        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    830830        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    896896         * TLB lookup.
    897897         */
    898         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     898        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    899899        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    900900        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    960960         * TLB lookup.
    961961         */
    962         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     962        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    963963        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    964964        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    10241024         * TLB lookup.
    10251025         */
    1026         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     1026        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    10271027        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    10281028        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    10841084         * TLB lookup.
    10851085         */
    1086         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     1086        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    10871087        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    10881088        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    11441144         * TLB lookup.
    11451145         */
    1146         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     1146        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    11471147        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    11481148        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    11991199         * TLB lookup.
    12001200         */
    1201         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrMem);
     1201        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrMem);
    12021202        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    12031203        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    12581258         * TLB lookup.
    12591259         */
    1260         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     1260        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    12611261        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    12621262        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    13221322         * TLB lookup.
    13231323         */
    1324         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     1324        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    13251325        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    13261326        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    14001400         * TLB lookup.
    14011401         */
    1402         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrEff);
     1402        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrEff);
    14031403        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    14041404        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    14831483         * TLB lookup.
    14841484         */
    1485         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV((RTGCPTR)uNewEsp); /* Doesn't work w/o casting to RTGCPTR (win /3 hangs). */
     1485        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, (RTGCPTR)uNewEsp); /* Doesn't work w/o casting to RTGCPTR (win /3 hangs). */
    14861486        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    14871487        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    15431543         * TLB lookup.
    15441544         */
    1545         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV((RTGCPTR)uOldEsp); /* Cast is required! 2023-08-11 */
     1545        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, (RTGCPTR)uOldEsp); /* Cast is required! 2023-08-11 */
    15461546        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    15471547        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    16181618         * TLB lookup.
    16191619         */
    1620         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV((RTGCPTR)uNewEsp); /* Doesn't work w/o casting to RTGCPTR (win /3 hangs). */
     1620        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, (RTGCPTR)uNewEsp); /* Doesn't work w/o casting to RTGCPTR (win /3 hangs). */
    16211621        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    16221622        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    16971697         * TLB lookup.
    16981698         */
    1699         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(uNewRsp);
     1699        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, uNewRsp);
    17001700        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    17011701        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
     
    17571757         * TLB lookup.
    17581758         */
    1759         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(uOldRsp);
     1759        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, uOldRsp);
    17601760        PCIEMTLBENTRY  pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.DataTlb, uTagNoRev);
    17611761        if (RT_LIKELY(   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.DataTlb.uTlbRevision)
  • trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllOpcodeFetch-x86.cpp

    r108278 r108791  
    322322         * Get the TLB entry for this piece of code.
    323323         */
    324         uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(GCPtrFirst);
     324        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtrFirst);
    325325        PIEMTLBENTRY   pTlbe     = IEMTLB_TAG_TO_EVEN_ENTRY(&pVCpu->iem.s.CodeTlb, uTagNoRev);
    326326        if (   pTlbe->uTag               == (uTagNoRev | pVCpu->iem.s.CodeTlb.uTlbRevision)
  • trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllTlbInline-x86.h

    r108232 r108791  
    5353# endif
    5454
    55     AssertCompile(IEMTLB_CALC_TAG_NO_REV((RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
     55    AssertCompile(IEMTLB_CALC_TAG_NO_REV(pVCpu, (RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
    5656    uint32_t const                 fMask = (f2MbLargePages ? _2M - 1U : _4M - 1U) >> GUEST_PAGE_SHIFT;
    5757    IEMTLB::LARGEPAGERANGE * const pRange = a_fGlobal
     
    163163     * We make ASSUMPTIONS about IEMTLB_CALC_TAG_NO_REV here.
    164164     */
    165     AssertCompile(IEMTLB_CALC_TAG_NO_REV((RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
     165    AssertCompile(IEMTLB_CALC_TAG_NO_REV(pVCpu, (RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
    166166    if (   !a_fDataTlb
    167167        && GCPtrInstrBufPcTag - GCPtrTag < (a_f2MbLargePage ? 512U : 1024U))
     
    390390                                                 RTGCPTR GCPtrInstrBufPcTag) RT_NOEXCEPT
    391391{
    392     AssertCompile(IEMTLB_CALC_TAG_NO_REV((RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
     392    AssertCompile(IEMTLB_CALC_TAG_NO_REV(pVCpu, (RTGCPTR)0x8731U << GUEST_PAGE_SHIFT) == 0x8731U);
    393393
    394394    GCPtrTag &= ~(RTGCPTR)(RT_BIT_64((a_f2MbLargePage ? 21 : 22) - GUEST_PAGE_SHIFT) - 1U);
     
    424424        IEMTLBTRACE_EVICT_SLOT(pVCpu, GCPtrTag, pTlb->aEntries[idxEven].GCPhys, idxEven, a_fDataTlb);
    425425        pTlb->aEntries[idxEven].uTag = 0;
    426         if (!a_fDataTlb && GCPtrTag == IEMTLB_CALC_TAG_NO_REV(pVCpu->iem.s.uInstrBufPc))
     426        if (!a_fDataTlb && GCPtrTag == IEMTLB_CALC_TAG_NO_REV(pVCpu, pVCpu->iem.s.uInstrBufPc))
    427427            pVCpu->iem.s.cbInstrBufTotal = 0;
    428428    }
     
    431431        IEMTLBTRACE_EVICT_SLOT(pVCpu, GCPtrTag, pTlb->aEntries[idxEven + 1].GCPhys, idxEven + 1, a_fDataTlb);
    432432        pTlb->aEntries[idxEven + 1].uTag = 0;
    433         if (!a_fDataTlb && GCPtrTag == IEMTLB_CALC_TAG_NO_REV(pVCpu->iem.s.uInstrBufPc))
     433        if (!a_fDataTlb && GCPtrTag == IEMTLB_CALC_TAG_NO_REV(pVCpu, pVCpu->iem.s.uInstrBufPc))
    434434            pVCpu->iem.s.cbInstrBufTotal = 0;
    435435    }
     
    446446# endif
    447447    {
    448         RTGCPTR const GCPtrInstrBufPcTag = a_fDataTlb ? 0 : IEMTLB_CALC_TAG_NO_REV(pVCpu->iem.s.uInstrBufPc);
     448        RTGCPTR const GCPtrInstrBufPcTag = a_fDataTlb ? 0 : IEMTLB_CALC_TAG_NO_REV(pVCpu, pVCpu->iem.s.uInstrBufPc);
    449449        if (pVCpu->cpum.GstCtx.cr4 & X86_CR4_PAE)
    450450            iemTlbInvalidateLargePageWorker<a_fDataTlb, true>(pVCpu, pTlb, GCPtrTag, GCPtrInstrBufPcTag);
  • trunk/src/VBox/VMM/VMMAll/target-x86/IEMInternal-x86.h

    r108589 r108791  
    28962896void                    iemLogSyscallProtModeInt(PVMCPUCC pVCpu, uint8_t u8Vector, uint8_t cbInstr);
    28972897
    2898 IEM_CIMPL_DEF_0(iemCImplRaiseDivideError);
    2899 IEM_CIMPL_DEF_0(iemCImplRaiseInvalidLockPrefix);
    2900 IEM_CIMPL_DEF_0(iemCImplRaiseInvalidOpcode);
     2898IEM_CIMPL_PROTO_0(iemCImplRaiseDivideError);
     2899IEM_CIMPL_PROTO_0(iemCImplRaiseInvalidLockPrefix);
     2900IEM_CIMPL_PROTO_0(iemCImplRaiseInvalidOpcode);
    29012901
    29022902/**
  • trunk/src/VBox/VMM/VMMR3/IEMR3.cpp

    r108589 r108791  
    13581358    };
    13591359    static const char * const s_apszSizes[] = { "L3", "L2", "L1", "L0" };
    1360     AssertCompile(((IEMTLBE_F_S2_NO_LIM_WRITE | IEMTLBE_F_S2_TL0 | IEMTLBE_F_S2_TL1) >> IEMTLBE_F_S2_NO_LIM_WRITE_SHIFT) == 7);
     1360    AssertCompile(((IEMTLBE_F_S2_NO_LIM_WRITE | IEMTLBE_F_S2_TL0 | IEMTLBE_F_S2_TL1) >> IEMTLBE_F_S2_NO_LIM_WRITE_BIT) == 7);
    13611361    pHlp->pfnPrintf(pHlp, IEMTLB_SLOT_FMT
    1362                     ": %s %#018RX64 -> %RGp / %p / %#05x U%c%c%c%cP%c%c%c%c%c%c/%c%c%c/%s/%c%c%c%c/%c as:%x vm:%x/%s %s%s\n",
     1362                    ": %s %#018RX64 -> %RGp / %p / %#05x U%c%c%c%cP%c%c%c%c%c%c%c/%c%c%c/%s/%c%c%c%c/%c as:%x vm:%x/%s %s%s\n",
    13631363                    uSlot,
    13641364                    (pTlbe->uTag & IEMTLB_REVISION_MASK) == uTlbRevision ? "valid  "
     
    13801380                    pTlbe->fFlagsAndPhysRev & IEMTLBE_F_EFF_NO_DIRTY     ? '-' : 'D',
    13811381                    pTlbe->fFlagsAndPhysRev & IEMTLBE_F_EFF_AMEC         ? 'A' : '-',
     1382                    pTlbe->fFlagsAndPhysRev & IEMTLBE_F_EFF_DEVICE       ? 'd' : '-',
    13821383                    /* / */
    13831384                    !(uSlot & 1)                                         ? '-' : 'G',
     
    13851386                    pTlbe->fFlagsAndPhysRev & IEMTLBE_F_S1_NSE           ? '-' : 'E',
    13861387                    /* / */
    1387                     s_apszLimAndTopLevelX[(pTlbe->fFlagsAndPhysRev >> IEMTLBE_F_S2_NO_LIM_WRITE_SHIFT) & 7],
     1388                    s_apszLimAndTopLevelX[(pTlbe->fFlagsAndPhysRev >> IEMTLBE_F_S2_NO_LIM_WRITE_BIT) & 7],
    13881389                    /* / */
    13891390                    pTlbe->fFlagsAndPhysRev & IEMTLBE_F_PG_NO_READ       ? '-'  : 'r',
     
    14411442    iemR3InfoTlbPrintHeader(pVCpu, pHlp, pTlb, pfHeader);
    14421443
    1443     uint64_t const    uTag  = IEMTLB_CALC_TAG_NO_REV(uAddress);
     1444    uint64_t const    uTag  = IEMTLB_CALC_TAG_NO_REV(pVCpu, uAddress);
    14441445#ifdef IEMTLB_TAG_TO_EVEN_INDEX
    14451446    uint32_t const    uSlot = IEMTLB_TAG_TO_EVEN_INDEX(uTag);
     
    17861787                case kIemTlbTraceType_InvlPg:
    17871788                    pHlp->pfnPrintf(pHlp, "%u: %016RX64 invlpg %RGv slot=" IEMTLB_SLOT_FMT "%s\n", idx, pCur->rip,
    1788                                     pCur->u64Param, (uint32_t)IEMTLB_ADDR_TO_EVEN_INDEX(pCur->u64Param), pszSymbol);
     1789                                    pCur->u64Param, (uint32_t)IEMTLB_ADDR_TO_EVEN_INDEX(pVCpu, pCur->u64Param), pszSymbol);
    17891790                    break;
    17901791                case kIemTlbTraceType_EvictSlot:
     
    18231824                                    pCur->enmType == kIemTlbTraceType_LoadGlobal ? 'g' : 'l', s_apszTlbType[pCur->bParam & 1],
    18241825                                    pCur->u64Param,
    1825                                       (uint32_t)IEMTLB_ADDR_TO_EVEN_INDEX(pCur->u64Param)
     1826                                      (uint32_t)IEMTLB_ADDR_TO_EVEN_INDEX(pVCpu, pCur->u64Param)
    18261827                                    | (pCur->enmType == kIemTlbTraceType_LoadGlobal),
    18271828                                    (RTGCPTR)pCur->u64Param2, pCur->u32Param, pszSymbol);
  • trunk/src/VBox/VMM/include/IEMInternal.h

    r108702 r108791  
    525525#endif
    526526#if defined(VBOX_VMM_TARGET_ARMV8) || defined(DOXYGEN_RUNNING)
    527 # define IEMTLBE_F_EFF_U_NO_READ      RT_BIT_64(0)  /**< Stage 1+2: No unprivileged read access. */
    528 # define IEMTLBE_F_EFF_U_NO_WRITE     RT_BIT_64(1)  /**< Stage 1+2: No unprivileged write access. */
    529 # define IEMTLBE_F_EFF_U_NO_EXEC      RT_BIT_64(2)  /**< Stage 1+2: No unprivileged execute access. */
    530 # define IEMTLBE_F_EFF_U_NO_GCS       RT_BIT_64(3)  /**< Stage 1+2: No unprivileged guard control stack access. */
    531 # define IEMTLBE_F_EFF_P_NO_READ      RT_BIT_64(4)  /**< Stage 1+2: No privileged read access. */
    532 # define IEMTLBE_F_EFF_P_NO_WRITE     RT_BIT_64(5)  /**< Stage 1+2: No privileged write access. */
    533 # define IEMTLBE_F_EFF_P_NO_EXEC      RT_BIT_64(6)  /**< Stage 1+2: No privileged execute access. */
    534 # define IEMTLBE_F_EFF_P_NO_GCS       RT_BIT_64(7)  /**< Stage 1+2: No privileged guard control stack access. */
    535 # define IEMTLBE_F_S2_NO_LIM_WRITE    RT_BIT_64(8)  /**< Stage 2:   No limited write access. */
    536 # define IEMTLBE_F_S2_NO_LIM_WRITE_SHIFT        8   /**< @see IEMTLBE_F_S2_NO_LIM_WRITE */
    537 # define IEMTLBE_F_S2_TL0             RT_BIT_64(9)  /**< Stage 2:   TopLevel0. */
    538 # define IEMTLBE_F_S2_TL1             RT_BIT_64(10) /**< Stage 2:   TopLevel1. */
    539 # define IEMTLBE_F_EFF_NO_DIRTY       RT_BIT_64(11) /**< Stage 1+2: Not dirty. */
    540 # define IEMTLBE_F_EFF_AMEC           RT_BIT_64(12) /**< Stage 1+2: Alternative MECID. */
    541 # define IEMTLBE_F_PG_NO_READ         RT_BIT_64(13) /**< Phys page: Not readable (MMIO / access handler, ROM) */
    542 # define IEMTLBE_F_PG_NO_WRITE        RT_BIT_64(14) /**< Phys page: Not writable (access handler, ROM, whatever). */
    543 # define IEMTLBE_F_NO_MAPPINGR3       RT_BIT_64(15) /**< TLB entry: The IEMTLBENTRY::pMappingR3 member is invalid. */
    544 # define IEMTLBE_F_PG_UNASSIGNED      RT_BIT_64(16) /**< Phys page: Unassigned memory (not RAM, ROM, MMIO2 or MMIO). */
    545 # define IEMTLBE_F_PG_CODE_PAGE       RT_BIT_64(17) /**< Phys page: Code page. */
    546 # define IEMTLBE_F_S1_NS              RT_BIT_64(18) /**< Stage 1:   Non-secure bit. */
    547 # define IEMTLBE_F_S1_NSE             RT_BIT_64(19) /**< Stage 1:   Non-secure extension/whatever bit. */
    548 # define IEMTLBE_F_EFF_SIZE_MASK   UINT64(0x300000) /**< Stage 1+2: Page size. @todo may need separate bits for each stage since they may use different page sizes. Or perhaps a single bit suffices? */
    549 # define IEMTLBE_F_EFF_SIZE_L3     UINT64(0x000000) /**< Stage 1+2: Smallest page size. */
    550 # define IEMTLBE_F_EFF_SIZE_L2     UINT64(0x100000) /**< Stage 1+2: Level 2 block. */
    551 # define IEMTLBE_F_EFF_SIZE_L1     UINT64(0x200000) /**< Stage 1+2: Level 1 block. */
    552 # define IEMTLBE_F_EFF_SIZE_L0     UINT64(0x300000) /**< Stage 1+2: Level 0 block. */
    553 # define IEMTLBE_F_EFF_SIZE_SHIFT             20    /**< @see IEMTLBE_F_EFF_SIZE_MASK */
    554 # define IEMTLBE_F_S1_ASID (UINT64_C(0xffff) << 22) /**< Stage 1:   Address space ID (from stage 1 root register). */
    555 # define IEMTLBE_F_S1_ASID_SHIFT                22  /**< @see IEMTLBE_F_S1_ASID */
    556 # define IEMTLBE_F_S2_VMID (UINT64_C(0xffff) << 38) /**< Stage 2:   Virtual machine ID (from stage 2 root register). */
    557 # define IEMTLBE_F_S2_VMID_SHIFT                38  /**< @see IEMTLBE_F_S2_VMID */
     527/** Stage 1+2: No unprivileged read access. */
     528# define IEMTLBE_F_EFF_P_NO_READ_BIT        0
     529# define IEMTLBE_F_EFF_P_NO_READ            RT_BIT_64(IEMTLBE_F_EFF_P_NO_READ_BIT)
     530/** Stage 1+2: No privileged write access. */
     531# define IEMTLBE_F_EFF_P_NO_WRITE_BIT       1
     532# define IEMTLBE_F_EFF_P_NO_WRITE           RT_BIT_64(IEMTLBE_F_EFF_P_NO_WRITE_BIT)
     533/** Stage 1+2: No privileged execute access. */
     534# define IEMTLBE_F_EFF_P_NO_EXEC_BIT        2
     535# define IEMTLBE_F_EFF_P_NO_EXEC            RT_BIT_64(IEMTLBE_F_EFF_P_NO_EXEC_BIT)
     536/** Stage 1+2: No privileged guard control stack access. */
     537# define IEMTLBE_F_EFF_P_NO_GCS_BIT         3
     538# define IEMTLBE_F_EFF_P_NO_GCS             RT_BIT_64(IEMTLBE_F_EFF_P_NO_GCS_BIT)
     539/** Stage 1+2: No unprivileged read access. */
     540# define IEMTLBE_F_EFF_U_NO_READ_BIT        4
     541# define IEMTLBE_F_EFF_U_NO_READ            RT_BIT_64(IEMTLBE_F_EFF_U_NO_READ_BIT)
     542/** Stage 1+2: No unprivileged write access. */
     543# define IEMTLBE_F_EFF_U_NO_WRITE_BIT       5
     544# define IEMTLBE_F_EFF_U_NO_WRITE           RT_BIT_64(IEMTLBE_F_EFF_U_NO_WRITE_BIT)
     545/** Stage 1+2: No unprivileged execute access. */
     546# define IEMTLBE_F_EFF_U_NO_EXEC_BIT        6
     547# define IEMTLBE_F_EFF_U_NO_EXEC            RT_BIT_64(IEMTLBE_F_EFF_U_NO_EXEC_BIT)
     548/** Stage 1+2: No unprivileged guard control stack access. */
     549# define IEMTLBE_F_EFF_U_NO_GCS_BIT         7
     550# define IEMTLBE_F_EFF_U_NO_GCS             RT_BIT_64(IEMTLBE_F_EFF_U_NO_GCS_BIT)
     551/** Stage 2:   No limited write access. */
     552# define IEMTLBE_F_S2_NO_LIM_WRITE_BIT      8
     553# define IEMTLBE_F_S2_NO_LIM_WRITE          RT_BIT_64(IEMTLBE_F_S2_NO_LIM_WRITE_BIT)
     554/** Stage 2:   TopLevel0. */
     555# define IEMTLBE_F_S2_TL0_BIT               9
     556# define IEMTLBE_F_S2_TL0                   RT_BIT_64(IEMTLBE_F_S2_TL0_BIT)
     557/** Stage 2:   TopLevel1. */
     558# define IEMTLBE_F_S2_TL1_BIT               10
     559# define IEMTLBE_F_S2_TL1                   RT_BIT_64(IEMTLBE_F_S2_TL1_BIT)
     560/** Stage 1+2: Not dirty. */
     561# define IEMTLBE_F_EFF_NO_DIRTY_BIT         11
     562# define IEMTLBE_F_EFF_NO_DIRTY             RT_BIT_64(IEMTLBE_F_EFF_NO_DIRTY_BIT)
     563/** Stage 1+2: Alternative MECID. */
     564# define IEMTLBE_F_EFF_AMEC_BIT             12
     565# define IEMTLBE_F_EFF_AMEC                 RT_BIT_64(IEMTLBE_F_EFF_AMEC_BIT)
     566/** Phys page: Not readable (MMIO / access handler, ROM) */
     567# define IEMTLBE_F_PG_NO_READ_BIT           13
     568# define IEMTLBE_F_PG_NO_READ               RT_BIT_64(IEMTLBE_F_PG_NO_READ_BIT)
     569/** Phys page: Not writable (access handler, ROM, whatever). */
     570# define IEMTLBE_F_PG_NO_WRITE_BIT          14
     571# define IEMTLBE_F_PG_NO_WRITE              RT_BIT_64(IEMTLBE_F_PG_NO_WRITE_BIT)
     572/** TLB entry: The IEMTLBENTRY::pMappingR3 member is invalid. */
     573# define IEMTLBE_F_NO_MAPPINGR3_BIT         15
     574# define IEMTLBE_F_NO_MAPPINGR3             RT_BIT_64(IEMTLBE_F_NO_MAPPINGR3_BIT)
     575/** Phys page: Unassigned memory (not RAM, ROM, MMIO2 or MMIO). */
     576# define IEMTLBE_F_PG_UNASSIGNED_BIT        16
     577# define IEMTLBE_F_PG_UNASSIGNED            RT_BIT_64(IEMTLBE_F_PG_UNASSIGNED_BIT)
     578/** Phys page: Code page. */
     579# define IEMTLBE_F_PG_CODE_PAGE_BIT         17
     580# define IEMTLBE_F_PG_CODE_PAGE             RT_BIT_64(IEMTLBE_F_PG_CODE_PAGE_BIT)
     581/** Stage 1:   Non-secure bit. */
     582# define IEMTLBE_F_S1_NS_BIT                18
     583# define IEMTLBE_F_S1_NS                    RT_BIT_64(IEMTLBE_F_S1_NS_BIT)
     584/** Stage 1:   Non-secure extension/whatever bit. */
     585# define IEMTLBE_F_S1_NSE_BIT               19
     586# define IEMTLBE_F_S1_NSE                   RT_BIT_64(IEMTLBE_F_S1_NSE_BIT)
     587/** Stage 1+2: Page size.
     588 * @todo may need separate bits for each stage since they may use different
     589 *       page sizes. Or perhaps a single bit suffices?  It is also possible
     590 *       that we don't need any of this at all, given the very rich
     591 *       invalidation interface on arm. */
     592# define IEMTLBE_F_EFF_SIZE_MASK            UINT64(0x300000)
     593/** @see IEMTLBE_F_EFF_SIZE_MASK */
     594# define IEMTLBE_F_EFF_SIZE_SHIFT           20
     595/** Stage 1+2: Smallest page size. */
     596# define IEMTLBE_F_EFF_SIZE_L3              UINT64(0x000000)
     597/** Stage 1+2: Level 2 block. */
     598# define IEMTLBE_F_EFF_SIZE_L2              UINT64(0x100000)
     599/** Stage 1+2: Level 1 block. */
     600# define IEMTLBE_F_EFF_SIZE_L1              UINT64(0x200000)
     601/** Stage 1+2: Level 0 block. */
     602# define IEMTLBE_F_EFF_SIZE_L0              UINT64(0x300000)
     603/** Stage 1+2: Device memory type (clear if normal memory type). */
     604# define IEMTLBE_F_EFF_DEVICE_BIT           22
     605# define IEMTLBE_F_EFF_DEVICE               RT_BIT_64(IEMTLBE_F_EFF_DEVICE_BIT)
     606/** Stage 1:   Address space ID (from stage 1 root register). */
     607# define IEMTLBE_F_S1_ASID                  (UINT64_C(0xffff) << IEMTLBE_F_S1_ASID_SHIFT)
     608/** @see IEMTLBE_F_S1_ASID */
     609# define IEMTLBE_F_S1_ASID_SHIFT            23
     610/** Stage 2:   Virtual machine ID (from stage 2 root register). */
     611# define IEMTLBE_F_S2_VMID                  (UINT64_C(0xffff) << IEMTLBE_F_S2_VMID_SHIFT)
     612/** @see IEMTLBE_F_S2_VMID */
     613# define IEMTLBE_F_S2_VMID_SHIFT            39
    558614# ifndef DOXYGEN_RUNNING
    559 #  define IEMTLBE_F_PHYS_REV        UINT64_C(0xffc0000000000000) /**< Physical revision mask. @sa IEMTLB_PHYS_REV_INCR */
     615/** Physical revision mask. @sa IEMTLB_PHYS_REV_INCR */
     616#  define IEMTLBE_F_PHYS_REV                UINT64_C(0xff80000000000000)
    560617# endif
    561618#endif
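    The reshuffled ARM flag layout packs everything below the physical revision: permission bits 0-19, the size field at bits 20-21, the new device-type bit at 22, the ASID at bits 23-38 and the VMID at bits 39-54, leaving bits 55-63 for IEMTLBE_F_PHYS_REV. A couple of field accessors and compile-time checks one could derive from these definitions (a sketch, not part of the changeset; the helper names are hypothetical):

        /* Hypothetical accessors, assuming the bit layout defined above. */
        DECLINLINE(uint16_t) iemTlbeGetS1Asid(uint64_t fFlagsAndPhysRev)
        {
            return (uint16_t)((fFlagsAndPhysRev & IEMTLBE_F_S1_ASID) >> IEMTLBE_F_S1_ASID_SHIFT);
        }

        DECLINLINE(uint16_t) iemTlbeGetS2Vmid(uint64_t fFlagsAndPhysRev)
        {
            return (uint16_t)((fFlagsAndPhysRev & IEMTLBE_F_S2_VMID) >> IEMTLBE_F_S2_VMID_SHIFT);
        }

        /* The ASID (bits 23..38) and VMID (bits 39..54) fields must not overlap
           each other, and the VMID must end below the physical revision mask. */
        AssertCompile(!(IEMTLBE_F_S1_ASID & IEMTLBE_F_S2_VMID));
        AssertCompile(!(IEMTLBE_F_S2_VMID & IEMTLBE_F_PHYS_REV));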
     
    568625                                     | PGMIEMGCPHYS2PTR_F_CODE_PAGE \
    569626                                     | IEMTLBE_F_PHYS_REV )
    570 #if defined(VBOX_VMM_TARGET_X86) /// @todo || defined(VBOX_VMM_TARGET_ARMV8)
     627#if defined(VBOX_VMM_TARGET_X86) || defined(VBOX_VMM_TARGET_ARMV8)
    571628AssertCompile(PGMIEMGCPHYS2PTR_F_NO_WRITE     == IEMTLBE_F_PG_NO_WRITE);
    572629AssertCompile(PGMIEMGCPHYS2PTR_F_NO_READ      == IEMTLBE_F_PG_NO_READ);
     
    579636#endif
    580637
     638/** Tests if the TLB entry is global (odd). */
     639#define IEMTLBE_IS_GLOBAL(a_pTlbe)      (((uintptr_t)(a_pTlbe) / sizeof(IEMTLBENTRY)) & 1)
     640
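    TLB entries are stored as even/odd pairs, the even slot holding the non-global mapping and the odd slot the global one, so the parity of an entry's index within the array identifies its kind. A usage sketch (assuming pTlbe and uTagNoRev come from a lookup as in the hunks above; uTlbRevisionGlobal is an assumed field name based on the "global TLB revision" member documented below):

        uint64_t const uRev   = IEMTLBE_IS_GLOBAL(pTlbe)
                              ? pVCpu->iem.s.DataTlb.uTlbRevisionGlobal /* odd slot */
                              : pVCpu->iem.s.DataTlb.uTlbRevision;      /* even slot */
        bool const     fValid = pTlbe->uTag == (uTagNoRev | uRev);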
    581641
    582642/** The TLB size (power of two).
     
    631691     * (The revision zero indicates an invalid TLB entry.)
    632692     *
    633      * The initial value is choosen to cause an early wraparound. */
     693     * The initial value is chosen to cause an early wraparound.
     694     * @arm     This includes the ASID & VM ID. */
    634695    uint64_t            uTlbRevision;
    635696    /** The TLB physical address revision - shadow of PGM variable.
    636697     *
    637      * This is actually only 56 bits wide (see IEMTLBENTRY::fFlagsAndPhysRev) and is
    638      * incremented by adding RT_BIT_64(8).  When it wraps around and becomes zero,
    639      * a rendezvous is called and each CPU wipe the IEMTLBENTRY::pMappingR3 as well
    640      * as IEMTLBENTRY::fFlagsAndPhysRev bits 63 thru 8, 4, and 3.
     698     * The revision number is the top x bits (target dependent, see
     699     * IEMTLBENTRY::fFlagsAndPhysRev) and is incremented by adding RT_BIT_64(x).
     700     * When it wraps around and becomes zero, a rendezvous is called and each CPU
     701     * wipes the IEMTLBENTRY::pMappingR3 as well as many of the
     702     * IEMTLBENTRY::fFlagsAndPhysRev bits.
     703     *
     704     * @arm  This includes the current ASID & VMID values.
     705     * @todo arm: implement ASID & VMID.
    641706     *
    642707     * The initial value is chosen to cause an early wraparound.
    643708     *
    644709     * @note This is placed between the two TLB revisions because we
    645      *       load it in pair with one or the other on arm64. */
     710     *       load it paired with one or the other on arm64.
     711     */
    646712    uint64_t volatile   uTlbPhysRev;
    647713    /** The global TLB revision.
     
    766832# define IEMTLB_PHYS_REV_INCR   RT_BIT_64(11)
    767833#elif defined(VBOX_VMM_TARGET_ARMV8)
    768 # define IEMTLB_PHYS_REV_INCR   RT_BIT_64(54)
     834# define IEMTLB_PHYS_REV_INCR   RT_BIT_64(55)
    769835#endif
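    On ARM the physical revision increment moves from bit 54 to bit 55, matching the one-bit shrink of IEMTLBE_F_PHYS_REV above (0xffc0... to 0xff80..., i.e. nine revision bits starting at bit 55). The relationship could be pinned down with a compile-time check of this sort (a sketch, not part of the changeset):

        /* The increment must be exactly the lowest bit of the revision mask,
           otherwise bumping uTlbPhysRev would corrupt the flag bits below it. */
        AssertCompile(IEMTLB_PHYS_REV_INCR == (IEMTLBE_F_PHYS_REV & ~(IEMTLBE_F_PHYS_REV << 1)));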
    770836#ifdef IEMTLBE_F_PHYS_REV
     
    775841 * Calculates the TLB tag for a virtual address but without TLB revision.
    776842 * @returns Tag value for indexing and comparing with IEMTLB::uTag.
     843 * @param   a_pVCpu     The CPU handle (for ARM targets to deal with
     844 *                      configurable page size).
    777845 * @param   a_GCPtr     The virtual address.  Must be RTGCPTR or the same size, or
    778846 *                      the clearing of the top 16 bits won't work (if 32-bit
    779847 *                      we'll end up with mostly zeros).
    780  */
    781 #define IEMTLB_CALC_TAG_NO_REV(a_GCPtr)     ( (((a_GCPtr) << 16) >> (GUEST_PAGE_SHIFT + 16)) )
     848 * @todo ARM: Support 52-bit and 56-bit address space sizes (FEAT_LVA,
     849 *       FEAT_LVA3) when we see hardware supporting them.  */
     850#ifdef VBOX_VMM_TARGET_ARMV8
     851# define IEMTLB_CALC_TAG_NO_REV(a_pVCpu, a_GCPtr)   ( (((a_GCPtr) << 16) >> (IEM_F_ARM_GET_TLB_PAGE_SHIFT((a_pVCpu)->iem.s.fExec) + 16)) )
     852#else
     853# define IEMTLB_CALC_TAG_NO_REV(a_pVCpu, a_GCPtr)   ( (((a_GCPtr) << 16) >> (GUEST_PAGE_SHIFT + 16)) )
     854#endif
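    On ARM the shift is no longer the fixed GUEST_PAGE_SHIFT but is taken from the execution-mode flags, so the tag granularity follows the configured translation granule. A worked sketch for a 16K stage 1 granule (the address value is illustrative; the shift arithmetic follows the IEM_F_ARM_* definitions further down):

        uint32_t const fExec     = pVCpu->iem.s.fExec;                  /* assume IEM_F_ARM_S1_PAGE_16K is set */
        unsigned const cShift    = IEM_F_ARM_GET_TLB_PAGE_SHIFT(fExec); /* 12 + (0x200 >> 8) = 14 */
        uint64_t const uTagNoRev = IEMTLB_CALC_TAG_NO_REV(pVCpu, GCPtr);
        /* For GCPtr = 0x123458000: (GCPtr << 16) drops the top 16 bits and
           >> (14 + 16) then yields 0x48d16, the 16K page number. */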
    782855/**
    783856 * Converts a TLB tag value into a even TLB index.
     
    801874
    802875/** Converts a GC address to an even TLB index. */
    803 #define IEMTLB_ADDR_TO_EVEN_INDEX(a_GCPtr)  IEMTLB_TAG_TO_EVEN_INDEX(IEMTLB_CALC_TAG_NO_REV(a_GCPtr))
     876#define IEMTLB_ADDR_TO_EVEN_INDEX(a_pVCpu, a_GCPtr) IEMTLB_TAG_TO_EVEN_INDEX(IEMTLB_CALC_TAG_NO_REV(a_pVCpu, a_GCPtr))
    804877
    805878
     
    12011274/** ARM: LSE2 alignment checks enabled (~SCTRL_ELx.nAA). */
    12021275# define IEM_F_ARM_AA                       UINT32_C(0x00080000)
     1276
     1277/** ARM: 4K page (granule) size - stage 1. */
     1278# define IEM_F_ARM_S1_PAGE_4K               UINT32_C(0x00000000)
     1279/** ARM: 16K page (granule) size - stage 1. */
     1280# define IEM_F_ARM_S1_PAGE_16K              UINT32_C(0x00000200)
     1281/** ARM: 64K page (granule) size - stage 1. */
     1282# define IEM_F_ARM_S1_PAGE_64K              UINT32_C(0x00000400)
     1283/** ARM: Mask for the stage 1 page (granule) size encoding.
     1284 * The encoded value is the page shift count minus 12. */
     1285# define IEM_F_ARM_S1_PAGE_MASK             UINT32_C(0x00000700)
     1287/** ARM: The shift count for the stage 1 page (granule) size encoding value. */
     1287# define IEM_F_ARM_S1_PAGE_SHIFT            8
     1288/** Get the current stage 1 page (granule) shift count. */
     1289# define IEM_F_ARM_GET_S1_PAGE_SHIFT(a_fExec)       (12 + (((a_fExec) & IEM_F_ARM_S1_PAGE_MASK) >> IEM_F_ARM_S1_PAGE_SHIFT))
     1290/** Get the current stage 1 page (granule) size. */
     1291# define IEM_F_ARM_GET_S1_PAGE_SIZE(a_fExec)        (1 << IEM_F_ARM_GET_S1_PAGE_SHIFT(a_fExec))
     1292/** Get the current stage 1 page (granule) offset mask. */
     1293# define IEM_F_ARM_GET_S1_PAGE_OFFSET_MASK(a_fExec) (IEM_F_ARM_GET_S1_PAGE_SIZE(a_fExec) - 1)
     1294
     1295/** Get the current TLB page (granule) shift count.
     1296 * The TLB page size is the smallest of S1 and S2 page sizes.
     1297 * @todo Implement stage 2 tables. */
     1298# define IEM_F_ARM_GET_TLB_PAGE_SHIFT(a_fExec)       IEM_F_ARM_GET_S1_PAGE_SHIFT(a_fExec)
     1299/** Get the current TLB page (granule) size. */
     1300# define IEM_F_ARM_GET_TLB_PAGE_SIZE(a_fExec)        IEM_F_ARM_GET_S1_PAGE_SIZE(a_fExec)
     1301/** Get the current TLB page (granule) offset mask. */
     1302# define IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(a_fExec) IEM_F_ARM_GET_S1_PAGE_OFFSET_MASK(a_fExec)
     1303
    12031304#endif /* ARM || doxygen  */
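    A hedged usage sketch of the new granule helpers, e.g. for deciding whether an access spills over a page boundary (the check itself is illustrative, not lifted from the changeset):

        uint32_t const fExec    = pVCpu->iem.s.fExec;
        uint64_t const offPage  = GCPtrMem & IEM_F_ARM_GET_TLB_PAGE_OFFSET_MASK(fExec);
        /* Does an access of cbMem bytes cross the current granule boundary? */
        bool const fCrossesPage = offPage + cbMem > IEM_F_ARM_GET_TLB_PAGE_SIZE(fExec);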
    12041305
     
    17951896#define IEMBRANCHED_F_ZERO          UINT8_C(0x80)
    17961897/** @} */
     1898
     1899
     1900/** @def IEM_MAX_MEM_MAPPINGS
     1901 * Maximum number of concurrent memory mappings needed by the target architecture.
     1902 * @x86     There are a few instructions with two memory operands (push/pop [mem],
     1903 *          string instructions).  We add another entry for safety.
     1904 * @arm     Except for the recently specified memcpy/move instructions,
     1905 *          ARM instructions take at most one memory operand.  We use 1 and add
     1906 *          another entry for safety, ignoring the memcpy instructions for now. */
     1907#if defined(VBOX_VMM_TARGET_X86) || defined(DOXYGEN_RUNNING) /* for now: */ || defined(VBOX_VMM_TARGET_AGNOSTIC)
     1908# define IEM_MAX_MEM_MAPPINGS       3
     1909#elif defined(VBOX_VMM_TARGET_ARMV8)
     1910# define IEM_MAX_MEM_MAPPINGS       2
     1911#else
     1912# error "port me"
     1913#endif
     1914
     1915/** @def IEM_BOUNCE_BUFFER_SIZE
     1916 * The size of the bounce buffers.  This is dictated by the largest memory
     1917 * operand of the target architecture.
     1918 * @x86     fxsave/fxrstor takes a 512 byte operand. Whether we actually need a
     1919 *          512 byte bounce buffer for it is questionable...
     1920 * @arm     Currently we shouldn't need more than 64 bytes here (ld64b, ld4). */
     1921#if defined(VBOX_VMM_TARGET_X86) || defined(DOXYGEN_RUNNING) /* for now: */ || defined(VBOX_VMM_TARGET_AGNOSTIC)
     1922# define IEM_BOUNCE_BUFFER_SIZE     512
     1923#elif defined(VBOX_VMM_TARGET_ARMV8)
     1924# define IEM_BOUNCE_BUFFER_SIZE     64
     1925#else
     1926# error "port me"
     1927#endif
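    Together the two macros cut the fixed per-VCPU footprint noticeably on ARM; the bounce buffer array alone shrinks from 3 x 512 to 2 x 64 bytes. The arithmetic can be pinned down at compile time along these lines (a sketch):

        /* Footprint of aBounceBuffers per the offset comments in the hunks below:
           x86: 3 * 512 = 0x600 bytes, ARM: 2 * 64 = 0x80 bytes. */
        AssertCompile(   IEM_MAX_MEM_MAPPINGS * IEM_BOUNCE_BUFFER_SIZE == 0x600
                      || IEM_MAX_MEM_MAPPINGS * IEM_BOUNCE_BUFFER_SIZE == 0x80);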
    17971928
    17981929
     
    20112142        uint32_t            u32Alignment4; /**< Alignment padding. */
    20122143#endif
    2013     } aMemMappings[3];                                                                                      /* 0x50 LB 0x30 */
     2144    } aMemMappings[IEM_MAX_MEM_MAPPINGS];                                           /* arm: 0x50 LB 0x20  x86: 0x50 LB 0x30 */
    20142145
    20152146    /** Locking records for the mapped memory. */
     
    20182149        PGMPAGEMAPLOCK      Lock;
    20192150        uint64_t            au64Padding[2];
    2020     } aMemMappingLocks[3];                                                                                  /* 0x80 LB 0x30 */
     2151    } aMemMappingLocks[IEM_MAX_MEM_MAPPINGS];                                       /* arm: 0x70 LB 0x20  x86: 0x80 LB 0x30 */
    20212152
    20222153    /** Bounce buffer info.
     
    20362167        /** Explicit alignment padding. */
    20372168        bool                afAlignment5[3];
    2038     } aMemBbMappings[3];                                                                                    /* 0xb0 LB 0x48 */
     2169    } aMemBbMappings[IEM_MAX_MEM_MAPPINGS];                                         /* arm: 0x90 LB 0x30  x86: 0xb0 LB 0x48 */
    20392170
    20402171    /** The flags of the current exception / interrupt.
    20412172     * @note X86 specific? */
    2042     uint32_t                fCurXcpt;                                                                       /* 0xf8 */
     2173    uint32_t                fCurXcpt;                                               /* arm: 0xc0          x86: 0xf8 */
    20432174    /** The current exception / interrupt.
    20442175     * @note X86 specific? */
    2045     uint8_t                 uCurXcpt;                                                                       /* 0xfc */
     2176    uint8_t                 uCurXcpt;                                               /* arm: 0xc4          x86: 0xfc */
    20462177    /** Exception / interrupt recursion depth.
    20472178     * @note X86 specific? */
    2048     int8_t                  cXcptRecursions;                                                                /* 0xfb */
     2179    int8_t                  cXcptRecursions;                                        /* arm: 0xc5          x86: 0xfb */
    20492180
    20502181    /** The next unused mapping index.
    20512182     * @todo try find room for this up with cActiveMappings. */
    2052     uint8_t                 iNextMapping;                                                                   /* 0xfd */
    2053     uint8_t                 abAlignment7[1];
     2183    uint8_t                 iNextMapping;                                           /* arm: 0xc6          x86: 0xfd */
     2184    uint8_t                 abAlignment7[IEM_MAX_MEM_MAPPINGS == 3 ? 1 : 0x39];
    20542185
    20552186    /** Bounce buffer storage.
     
    20572188    struct
    20582189    {
    2059         uint8_t             ab[512];
    2060     } aBounceBuffers[3];                                                                                    /* 0x100 LB 0x600 */
     2190        uint8_t             ab[IEM_BOUNCE_BUFFER_SIZE];
     2191    } aBounceBuffers[IEM_MAX_MEM_MAPPINGS];                                        /* arm: 0x100 LB 0x80  x86: 0x100 LB 0x600 */
    20612192
    20622193
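    The arm/x86 offset annotations in the hunks above could be enforced with the usual IPRT compile-time asserts; a sketch for the ARM layout (offsets taken from the comments, and it is an assumption here that the containing struct is IEMCPU):

        #ifdef VBOX_VMM_TARGET_ARMV8
        AssertCompileMemberOffset(IEMCPU, aMemMappings,     0x50);
        AssertCompileMemberOffset(IEMCPU, aMemMappingLocks, 0x70);
        AssertCompileMemberOffset(IEMCPU, aMemBbMappings,   0x90);
        AssertCompileMemberOffset(IEMCPU, aBounceBuffers,   0x100);
        #endif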
     
    34213552#  include "VMMAll/target-x86/IEMInternal-x86.h"
    34223553# elif defined(VBOX_VMM_TARGET_ARMV8)
    3423 //#  include "VMMAll/target-armv8/IEMInternal-armv8.h"
     3554#  include "VMMAll/target-armv8/IEMInternal-armv8.h"
    34243555# endif
    34253556