VirtualBox

Changeset 106180 in vbox for trunk/src/VBox/VMM/include


Ignore:
Timestamp:
Sep 30, 2024 1:51:48 PM (5 months ago)
Author:
vboxsync
svn:sync-xref-src-repo-rev:
164964
Message:

VMM/IEM: Refactoring TbExits and other early eflags calculation postponement changes. bugref:10720

Location:
trunk/src/VBox/VMM/include
Files:
2 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h

    r106127 r106180  
    8080# if 0 || defined(DOXYGEN_RUNNING)
    8181#  define IEMNATIVE_WITH_EFLAGS_POSTPONING
     82# endif
     83#endif
     84#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     85# ifndef IEMNATIVE_WITH_EFLAGS_SKIPPING
     86#  error "IEMNATIVE_WITH_EFLAGS_POSTPONING requires IEMNATIVE_WITH_EFLAGS_SKIPPING at present"
    8287# endif
    8388#endif
     
    314319 * Mask of registers the callee will not save and may trash. */
    315320#ifdef RT_ARCH_AMD64
    316 # define IEMNATIVE_CALL_RET_GREG             X86_GREG_xAX
     321# define IEMNATIVE_CALL_RET_GREG            X86_GREG_xAX
    317322
    318323# ifdef RT_OS_WINDOWS
     
    435440# define IEMNATIVE_CALL_VOLATILE_NOTMP_GREG_MASK    IEMNATIVE_CALL_VOLATILE_GREG_MASK
    436441#endif
     442
     443/** @def IEMNATIVE_CALL_NONVOLATILE_GREG_MASK
     444 * The allocatable non-volatile general purpose register set.  */
     445#define IEMNATIVE_CALL_NONVOLATILE_GREG_MASK \
     446    (~IEMNATIVE_CALL_VOLATILE_GREG_MASK & ~IEMNATIVE_REG_FIXED_MASK & IEMNATIVE_HST_GREG_MASK)
    437447/** @} */
    438448
     
    480490{
    481491    kIemNativeLabelType_Invalid = 0,
    482     /*
    483      * Labels w/o data, only once instance per TB - aka exit reasons.
     492    /** @name Exit reasons - Labels w/o data, only one instance per TB.
    484493     *
    485      * Note! Jumps to these requires instructions that are capable of spanning
    486      *       the max TB length.
     494     * The labels requiring register inputs are documented.
     495     *
      496     * @note Jumps to these require instructions that are capable of spanning the
     497     *       max TB length.
     498     * @{
    487499     */
    488500    /* Simple labels comes first for indexing reasons. RaiseXx is order by the exception's numerical value(s). */
     
    496508    kIemNativeLabelType_RaiseMf,                /**< Raise (throw) X86_XCPT_MF (10h). */
    497509    kIemNativeLabelType_RaiseXf,                /**< Raise (throw) X86_XCPT_XF (13h). */
    498     kIemNativeLabelType_ObsoleteTb,
    499     kIemNativeLabelType_NeedCsLimChecking,
    500     kIemNativeLabelType_CheckBranchMiss,
     510    kIemNativeLabelType_ObsoleteTb,             /**< Calls iemNativeHlpObsoleteTb (no inputs). */
     511    kIemNativeLabelType_NeedCsLimChecking,      /**< Calls iemNativeHlpNeedCsLimChecking (no inputs). */
     512    kIemNativeLabelType_CheckBranchMiss,        /**< Calls iemNativeHlpCheckBranchMiss (no inputs). */
    501513    kIemNativeLabelType_LastSimple = kIemNativeLabelType_CheckBranchMiss,
    502     /* Manually defined labels. */
     514
     515    /* Manually defined labels: */
      516    /** Returns with VINF_SUCCESS, no inputs. */
     517    kIemNativeLabelType_ReturnSuccess,
     518    /** Returns with VINF_IEM_REEXEC_FINISH_WITH_FLAGS, no inputs. */
     519    kIemNativeLabelType_ReturnWithFlags,
     520    /** Returns with VINF_IEM_REEXEC_BREAK, no inputs. */
    503521    kIemNativeLabelType_ReturnBreak,
     522    /** Returns with VINF_IEM_REEXEC_BREAK_FF, no inputs. */
    504523    kIemNativeLabelType_ReturnBreakFF,
     524    /** The last TB exit label that doesn't have any input registers. */
     525    kIemNativeLabelType_LastTbExitWithoutInputs = kIemNativeLabelType_ReturnBreakFF,
     526
     527    /** Argument registers 1, 2 & 3 are set up.  */
    505528    kIemNativeLabelType_ReturnBreakViaLookup,
     529    /** Argument registers 1, 2 & 3 are set up.  */
    506530    kIemNativeLabelType_ReturnBreakViaLookupWithIrq,
     531    /** Argument registers 1 & 2 are set up.  */
    507532    kIemNativeLabelType_ReturnBreakViaLookupWithTlb,
     533    /** Argument registers 1 & 2 are set up.  */
    508534    kIemNativeLabelType_ReturnBreakViaLookupWithTlbAndIrq,
    509     kIemNativeLabelType_ReturnWithFlags,
     535    /** Return register holds the RC and the instruction number is in CL/RCX
      536     * on amd64 and the 2nd argument register elsewhere. */
    510537    kIemNativeLabelType_NonZeroRetOrPassUp,
    511     kIemNativeLabelType_ReturnSuccess,          /**< Sets eax/w0 to zero and returns. */
     538
    512539    /** The last fixup for branches that can span almost the whole TB length.
    513540     * @note Whether kIemNativeLabelType_Return needs to be one of these is
    514541     *       a bit questionable, since nobody jumps to it except other tail code. */
    515     kIemNativeLabelType_LastWholeTbBranch = kIemNativeLabelType_ReturnSuccess,
     542    kIemNativeLabelType_LastWholeTbBranch = kIemNativeLabelType_NonZeroRetOrPassUp,
    516543    /** The last fixup for branches that exits the TB. */
    517     kIemNativeLabelType_LastTbExit        = kIemNativeLabelType_ReturnSuccess,
     544    kIemNativeLabelType_LastTbExit        = kIemNativeLabelType_NonZeroRetOrPassUp,
     545    /** @} */
    518546
    519547    /** Loop-jump target. */
     
    538566#define IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmLabel) \
    539567    ((a_enmLabel) <= kIemNativeLabelType_LastTbExit && (a_enmLabel) > kIemNativeLabelType_Invalid)
     568
     569#define IEMNATIVELABELTYPE_IS_EXIT_WITHOUT_INPUTS(a_enmLabel) \
     570    ((a_enmLabel) <= kIemNativeLabelType_LastTbExitWithoutInputs && (a_enmLabel) > kIemNativeLabelType_Invalid)
     571
     572/**
      573 * Get the mask of input registers for a TB exit label.
      574 * This will return zero for any non-exit label.
     575 */
     576#ifdef RT_ARCH_AMD64
     577# define IEMNATIVELABELTYPE_GET_INPUT_REG_MASK(a_enmLabel) \
     578    (     (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookup \
     579       || (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithIrq \
     580     ? RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG3_GREG) \
     581     :    (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithTlb \
     582       || (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithTlbAndIrq \
     583     ? RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG) \
     584     : (a_enmLabel) == kIemNativeLabelType_NonZeroRetOrPassUp \
     585     ? RT_BIT_32(IEMNATIVE_CALL_RET_GREG)  | RT_BIT_32(X86_GREG_xCX) /* <-- the difference */ \
     586     : 0)
     587# else
     588# define IEMNATIVELABELTYPE_GET_INPUT_REG_MASK(a_enmLabel) \
     589    (     (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookup \
     590       || (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithIrq \
     591     ? RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG3_GREG) \
     592     :    (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithTlb \
     593       || (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithTlbAndIrq \
     594     ? RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG) \
     595     : (a_enmLabel) == kIemNativeLabelType_NonZeroRetOrPassUp \
     596     ? RT_BIT_32(IEMNATIVE_CALL_RET_GREG)  | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG) \
     597     : 0)
     598#endif
    540599
    541600
     
    660719#define IEMLIVENESSBIT_IDX_EFL_SF       ((unsigned)kIemNativeGstReg_EFlags + 5)
    661720#define IEMLIVENESSBIT_IDX_EFL_OF       ((unsigned)kIemNativeGstReg_EFlags + 6)
     721#define IEMLIVENESSBIT_IDX_EFL_COUNT    7
    662722
    663723
     
    819879# define IEMLIVENESS_STATE_ARE_STATUS_EFL_TO_BE_CLOBBERED(a_pCurEntry) \
    820880    ( (((a_pCurEntry)->Bit0.bm64 | (a_pCurEntry)->Bit1.bm64) & IEMLIVENESSBIT_STATUS_EFL_MASK) == 0 )
     881
     882/***
     883 * Construct a mask of what will be clobbered and never used.
     884 *
     885 * This is mainly used with IEMLIVENESSBIT_STATUS_EFL_MASK to avoid
     886 * unnecessary EFLAGS calculations.
     887 *
     888 * @param a_pCurEntry  The current liveness entry.
     889 * @note  Used by actual code.
     890 */
     891# define IEMLIVENESS_STATE_GET_WILL_BE_CLOBBERED_SET(a_pCurEntry) \
     892    ( ~((a_pCurEntry)->Bit0.bm64 | (a_pCurEntry)->Bit1.bm64) & IEMLIVENESSBIT_MASK )
    821893
    822894/** Construct a mask of the guest registers in the UNUSED and XCPT_OR_CALL
     
    864936 * @note  Used by actual code. */
    865937# define IEMLIVENESS_STATE_GET_CAN_BE_FREED_SET(a_pCurEntry) \
    866             (  ~(a_pCurEntry)->aBits[IEMLIVENESS_BIT_READ].bm64 \
    867              & ~(a_pCurEntry)->aBits[IEMLIVENESS_BIT_WRITE].bm64 \
    868              & IEMLIVENESSBIT_MASK )
    869 
     938    (  ~(a_pCurEntry)->aBits[IEMLIVENESS_BIT_READ].bm64 \
     939     & ~(a_pCurEntry)->aBits[IEMLIVENESS_BIT_WRITE].bm64 \
     940     & IEMLIVENESSBIT_MASK )
     941
     942/***
     943 * Construct a mask of what will be clobbered and never used.
     944 *
     945 * This is mainly used with IEMLIVENESSBIT_STATUS_EFL_MASK to avoid
     946 * unnecessary EFLAGS calculations.
     947 *
     948 * @param a_pCurEntry  The current liveness entry.
     949 * @note  Used by actual code.
     950 */
     951# define IEMLIVENESS_STATE_GET_WILL_BE_CLOBBERED_SET(a_pCurEntry) \
     952    (  (a_pCurEntry)->aBits[IEMLIVENESS_BIT_WRITE].bm64 \
     953     & ~(  (a_pCurEntry)->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 \
     954         | (a_pCurEntry)->aBits[IEMLIVENESS_BIT_READ].bm64 \
     955         | (a_pCurEntry)->aBits[IEMLIVENESS_BIT_CALL].bm64) )
     956
     957/**
      958 * Construct a mask of what (EFLAGS) can be postponed.
     959 *
      960 * The postponement is for avoiding EFLAGS status bits calculations in the
     961 * primary code stream whenever possible, and instead only do these in the TLB
     962 * load and TB exit code paths which shouldn't be traveled quite as often.
     963 * A requirement, though, is that the status bits will be clobbered later in the
     964 * TB.
     965 *
      966 * Users need to apply IEMLIVENESSBIT_STATUS_EFL_MASK if appropriate/necessary.
     967 *
     968 * @param a_pCurEntry  The current liveness entry.
     969 * @note  Used by actual code.
     970 */
     971# define IEMLIVENESS_STATE_GET_CAN_BE_POSTPONED_SET(a_pCurEntry) \
     972    (  (a_pCurEntry)->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 \
     973     & (a_pCurEntry)->aBits[IEMLIVENESS_BIT_WRITE].bm64 \
     974     & ~(  (a_pCurEntry)->aBits[IEMLIVENESS_BIT_READ].bm64 \
     975         | (a_pCurEntry)->aBits[IEMLIVENESS_BIT_CALL].bm64) )
    870976
    871977#endif /* IEMLIVENESS_EXTENDED_LAYOUT */
     
    10391145#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
    10401146# define IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(a_pReNative, a_fEflNeeded) \
    1041     AssertMsg(!((a_pReNative)->fPostponingEFlags & (a_fEflNeeded)), \
    1042               ("%#x & %#x -> %#x\n", (a_pReNative)->fPostponingEFlags, \
    1043                a_fEflNeeded, (a_pReNative)->fPostponingEFlags & (a_fEflNeeded) ))
     1147    AssertMsg(!((a_pReNative)->PostponedEfl.fEFlags & (a_fEflNeeded)), \
     1148              ("%#x & %#x -> %#x\n", (a_pReNative)->PostponedEfl.fEFlags, \
     1149               a_fEflNeeded, (a_pReNative)->PostponedEfl.fEFlags & (a_fEflNeeded) ))
    10441150#else
    10451151# define IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(a_pReNative, a_fEflNeeded) ((void)0)
     
    10521158#if defined(IEMNATIVE_WITH_EFLAGS_SKIPPING) && defined(IEMNATIVE_WITH_EFLAGS_POSTPONING)
    10531159# define IEMNATIVE_ASSERT_EFLAGS_SKIPPING_AND_POSTPONING(a_pReNative, a_fEflNeeded) \
    1054     AssertMsg(!(((a_pReNative)->fSkippingEFlags | (a_pReNative)->fPostponingEFlags) & (a_fEflNeeded)), \
    1055               ("(%#x | %#x) & %#x -> %#x\n", (a_pReNative)->fSkippingEFlags, (a_pReNative)->fPostponingEFlags, \
    1056                a_fEflNeeded, ((a_pReNative)->fSkippingEFlags | (a_pReNative)->fPostponingEFlags) & (a_fEflNeeded) ))
     1160    AssertMsg(!(((a_pReNative)->fSkippingEFlags | (a_pReNative)->PostponedEfl.fEFlags) & (a_fEflNeeded)), \
     1161              ("(%#x | %#x) & %#x -> %#x\n", (a_pReNative)->fSkippingEFlags, (a_pReNative)->PostponedEfl.fEFlags, \
     1162               a_fEflNeeded, ((a_pReNative)->fSkippingEFlags | (a_pReNative)->PostponedEfl.fEFlags) & (a_fEflNeeded) ))
    10571163#elif defined(IEMNATIVE_WITH_EFLAGS_SKIPPING)
    10581164# define IEMNATIVE_ASSERT_EFLAGS_SKIPPING_AND_POSTPONING(a_pReNative, a_fEflNeeded) \
     
    10771183#else
    10781184# define IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(a_pReNative, a_off, a_fEflNeeded) do { } while (0)
     1185#endif
     1186
     1187
     1188/** @def IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS
     1189 * Number of extra instructions to allocate for each TB exit to account for
     1190 * postponed EFLAGS calculations.
     1191 */
     1192#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     1193# ifdef RT_ARCH_AMD64
     1194#  define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS   32
     1195# elif defined(RT_ARCH_ARM64) || defined(DOXYGEN_RUNNING)
     1196#  define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS   32
     1197# else
     1198#  error "port me"
     1199# endif
     1200#else
     1201# define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS    0
     1202#endif
     1203
     1204/** @def IEMNATIVE_CLEAR_POSTPONED_EFLAGS
     1205 * Helper macro function for calling iemNativeClearPostponedEFlags() when
     1206 * IEMNATIVE_WITH_EFLAGS_POSTPONING is enabled.
     1207 */
     1208#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     1209# define IEMNATIVE_CLEAR_POSTPONED_EFLAGS(a_pReNative, a_fEflClobbered) iemNativeClearPostponedEFlags<a_fEflClobbered>(a_pReNative)
     1210#else
     1211# define IEMNATIVE_CLEAR_POSTPONED_EFLAGS(a_pReNative, a_fEflClobbered) ((void)0)
    10791212#endif
    10801213
     
    15331666
    15341667
     1668#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     1669typedef enum IEMNATIVE_POSTPONED_EFL_OP_T : uint8_t
     1670{
     1671    kIemNativePostponedEflOp_Invalid = 0,
     1672    /** Logical operation.
     1673     * Operands: result register.
     1674     * @note This clears OF, CF and (undefined) AF, thus no need for inputs. */
     1675    kIemNativePostponedEflOp_Logical,
     1676    kIemNativePostponedEflOp_End
     1677} IEMNATIVE_POSTPONED_EFL_OP_T;
     1678#endif /* IEMNATIVE_WITH_EFLAGS_POSTPONING */
     1679
    15351680/**
    15361681 * Conditional stack entry.
     
    16701815#endif
    16711816#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
    1672     uint32_t                    fPostponingEFlags;
     1817    struct
     1818    {
      1819        /** EFLAGS status bits that we're currently postponing the calculation of. */
     1820        uint32_t                        fEFlags;
     1821        /** The postponed EFLAGS status bits calculation operation. */
     1822        IEMNATIVE_POSTPONED_EFL_OP_T    enmOp;
     1823        /** The bit-width of the postponed EFLAGS calculation. */
     1824        uint8_t                         cOpBits;
     1825        /** Host register holding result or first source for the delayed operation,
     1826         *  UINT8_MAX if not in use. */
     1827        uint8_t                         idxReg1;
     1828        /** Host register holding second source for the delayed operation,
     1829         *  UINT8_MAX if not in use. */
     1830        uint8_t                         idxReg2;
     1831    } PostponedEfl;
    16731832#endif
    16741833
  • trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h

    r106144 r106180  
    234234        pCodeBuf[off++] = Armv8A64MkInstrMovZ(iGpr, uImm32 & UINT32_C(0xffff), 0, false /*f64Bit*/);
    235235        pCodeBuf[off++] = Armv8A64MkInstrMovK(iGpr, uImm32 >> 16,              1, false /*f64Bit*/);
     236    }
     237
     238#else
     239# error "port me"
     240#endif
     241    return off;
     242}
     243
     244
     245/**
      246 * Variant of iemNativeEmitLoadGpr32Imm where the caller ensures sufficient
     247 * buffer space.
     248 *
     249 * Max buffer consumption:
     250 *      - AMD64: 6 instruction bytes.
     251 *      - ARM64: 2 instruction words (8 bytes).
     252 *
     253 * @note The top 32 bits will be cleared.
     254 */
     255template<uint32_t const a_uImm32>
     256DECL_FORCE_INLINE(uint32_t) iemNativeEmitLoadGpr32ImmExT(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGpr)
     257{
     258#ifdef RT_ARCH_AMD64
     259    if (a_uImm32 == 0)
     260    {
     261        /* xor gpr, gpr */
     262        if (iGpr >= 8)
     263            pCodeBuf[off++] = X86_OP_REX_R | X86_OP_REX_B;
     264        pCodeBuf[off++] = 0x33;
     265        pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, iGpr & 7, iGpr & 7);
     266    }
     267    else
     268    {
     269        /* mov gpr, imm32 */
     270        if (iGpr >= 8)
     271            pCodeBuf[off++] = X86_OP_REX_B;
     272        pCodeBuf[off++] = 0xb8 + (iGpr & 7);
     273        pCodeBuf[off++] = RT_BYTE1(a_uImm32);
     274        pCodeBuf[off++] = RT_BYTE2(a_uImm32);
     275        pCodeBuf[off++] = RT_BYTE3(a_uImm32);
     276        pCodeBuf[off++] = RT_BYTE4(a_uImm32);
     277    }
     278
     279#elif defined(RT_ARCH_ARM64)
     280    if RT_CONSTEXPR((a_uImm32 >> 16) == 0)
     281        /* movz gpr, imm16 */
     282        pCodeBuf[off++] = Armv8A64MkInstrMovZ(iGpr, a_uImm32,                    0, false /*f64Bit*/);
     283    else if RT_CONSTEXPR((a_uImm32 & UINT32_C(0xffff)) == 0)
     284        /* movz gpr, imm16, lsl #16 */
     285        pCodeBuf[off++] = Armv8A64MkInstrMovZ(iGpr, a_uImm32 >> 16,              1, false /*f64Bit*/);
     286    else if RT_CONSTEXPR((a_uImm32 & UINT32_C(0xffff)) == UINT32_C(0xffff))
     287        /* movn gpr, imm16, lsl #16 */
     288        pCodeBuf[off++] = Armv8A64MkInstrMovN(iGpr, ~a_uImm32 >> 16,             1, false /*f64Bit*/);
     289    else if RT_CONSTEXPR((a_uImm32 >> 16) == UINT32_C(0xffff))
     290        /* movn gpr, imm16 */
     291        pCodeBuf[off++] = Armv8A64MkInstrMovN(iGpr, ~a_uImm32,                   0, false /*f64Bit*/);
     292    else
     293    {
     294        pCodeBuf[off++] = Armv8A64MkInstrMovZ(iGpr, a_uImm32 & UINT32_C(0xffff), 0, false /*f64Bit*/);
     295        pCodeBuf[off++] = Armv8A64MkInstrMovK(iGpr, a_uImm32 >> 16,              1, false /*f64Bit*/);
    236296    }
    237297
     
    81568216 * Emits a call to a 64-bit address.
    81578217 */
     8218template<bool const a_fSkipEflChecks = false>
    81588219DECL_INLINE_THROW(uint32_t) iemNativeEmitCallImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uintptr_t uPfn)
    81598220{
     8221    if RT_CONSTEXPR(!a_fSkipEflChecks)
     8222    {
     8223        IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS);
     8224        IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(  pReNative, X86_EFL_STATUS_BITS);
     8225    }
     8226
    81608227#ifdef RT_ARCH_AMD64
    81618228    off = iemNativeEmitLoadGprImm64(pReNative, off, X86_GREG_xAX, uPfn);
     
    82818348*********************************************************************************************************************************/
    82828349
     8350#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     8351/* IEMAllN8veEmit-x86.h: */
     8352template<uint32_t const a_bmInputRegs>
     8353DECL_FORCE_INLINE_THROW(uint32_t)
     8354iemNativeDoPostponedEFlagsAtTbExitEx(PIEMRECOMPILERSTATE pReNative, uint32_t off, PIEMNATIVEINSTR pCodeBuf);
     8355
     8356template<uint32_t const a_bmInputRegs>
     8357DECL_FORCE_INLINE_THROW(uint32_t)
     8358iemNativeDoPostponedEFlagsAtTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off);
     8359#endif
     8360
     8361
    82838362/**
    82848363 * Helper for marking the current conditional branch as exiting the TB.
     
    82988377
    82998378/**
      8379 * Unconditionally exits the translation block via a branch instruction.
     8380 *
     8381 * @note In case a delayed EFLAGS calculation is pending, this may emit an
     8382 *       additional IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS instructions.
     8383 */
     8384template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fActuallyExitingTb = true, bool const a_fPostponedEfl = true>
     8385DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off)
     8386{
     8387    IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
     8388    AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
     8389
     8390    if RT_CONSTEXPR(a_fActuallyExitingTb)
     8391        iemNativeMarkCurCondBranchAsExiting(pReNative);
     8392
     8393#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     8394    if RT_CONSTEXPR(a_fPostponedEfl)
     8395        off = iemNativeDoPostponedEFlagsAtTbExitEx<IEMNATIVELABELTYPE_GET_INPUT_REG_MASK(a_enmExitReason)>(pReNative, off,
     8396                                                                                                           pCodeBuf);
     8397#endif
     8398
     8399#ifdef RT_ARCH_AMD64
     8400    /* jmp rel32 */
     8401    pCodeBuf[off++] = 0xe9;
     8402    iemNativeAddTbExitFixup(pReNative, off, a_enmExitReason);
     8403    pCodeBuf[off++] = 0xfe;
     8404    pCodeBuf[off++] = 0xff;
     8405    pCodeBuf[off++] = 0xff;
     8406    pCodeBuf[off++] = 0xff;
     8407
     8408#elif defined(RT_ARCH_ARM64)
     8409    iemNativeAddTbExitFixup(pReNative, off, a_enmExitReason);
     8410    pCodeBuf[off++] = Armv8A64MkInstrB(-1);
     8411
     8412#else
     8413# error "Port me!"
     8414#endif
     8415    return off;
     8416}
     8417
     8418
     8419/**
      8420 * Unconditionally exits the translation block via a branch instruction.
     8421 *
     8422 * @note In case a delayed EFLAGS calculation is pending, this may emit an
     8423 *       additional IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS instructions.
     8424 */
     8425template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fActuallyExitingTb = true, bool const a_fPostponedEfl = true>
     8426DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off)
     8427{
     8428    IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
     8429    AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
     8430
     8431    if RT_CONSTEXPR(a_fActuallyExitingTb)
     8432        iemNativeMarkCurCondBranchAsExiting(pReNative);
     8433
     8434#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     8435    if RT_CONSTEXPR(a_fPostponedEfl)
     8436        off = iemNativeDoPostponedEFlagsAtTbExit<IEMNATIVELABELTYPE_GET_INPUT_REG_MASK(a_enmExitReason)>(pReNative, off);
     8437#endif
     8438
     8439#ifdef RT_ARCH_AMD64
     8440    PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
     8441
     8442    /* jmp rel32 */
     8443    pCodeBuf[off++] = 0xe9;
     8444    iemNativeAddTbExitFixup(pReNative, off, a_enmExitReason);
     8445    pCodeBuf[off++] = 0xfe;
     8446    pCodeBuf[off++] = 0xff;
     8447    pCodeBuf[off++] = 0xff;
     8448    pCodeBuf[off++] = 0xff;
     8449
     8450#elif defined(RT_ARCH_ARM64)
     8451    PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
     8452    iemNativeAddTbExitFixup(pReNative, off, a_enmExitReason);
     8453    pCodeBuf[off++] = Armv8A64MkInstrB(-1);
     8454
     8455#else
     8456# error "Port me!"
     8457#endif
     8458    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     8459    return off;
     8460}
     8461
     8462
     8463/**
    83008464 * Emits a Jcc rel32 / B.cc imm19 to the given label (ASSUMED requiring fixup).
    8301  */
     8465 *
     8466 * @note In case a delayed EFLAGS calculation is pending, this may emit an
     8467 *       additional IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS instructions.
     8468 */
     8469template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
    83028470DECL_FORCE_INLINE_THROW(uint32_t)
    8303 iemNativeEmitJccTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
    8304                          IEMNATIVELABELTYPE enmExitReason, IEMNATIVEINSTRCOND enmCond)
     8471iemNativeEmitTbExitJccEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off, IEMNATIVEINSTRCOND enmCond)
    83058472{
    83068473    IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    8307     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
    8308     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
     8474    AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
     8475
     8476#ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     8477    if RT_CONSTEXPR(a_fPostponedEfl)
     8478        if (pReNative->PostponedEfl.fEFlags)
     8479        {
     8480            /* Jcc l_NonPrimaryCodeStreamTarget */
     8481            uint32_t const offFixup1 = off;
     8482            off = iemNativeEmitJccToFixedEx(pCodeBuf, off, off + 1, enmCond);
     8483
     8484            /* JMP l_PrimaryCodeStreamResume */
     8485            uint32_t const offFixup2 = off;
     8486            off = iemNativeEmitJmpToFixedEx(pCodeBuf, off, off + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
     8487
     8488            /* l_NonPrimaryCodeStreamTarget: */
     8489            iemNativeFixupFixedJump(pReNative, offFixup1, off);
     8490            off = iemNativeEmitTbExitEx<a_enmExitReason, false /*a_fActuallyExitingTb*/, true>(pReNative, pCodeBuf, off);
     8491
     8492            /* l_PrimaryCodeStreamResume: */
     8493            iemNativeFixupFixedJump(pReNative, offFixup2, off);
     8494            IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     8495            return off;
     8496        }
     8497#endif
    83098498
    83108499#if defined(RT_ARCH_AMD64)
     
    83128501    pCodeBuf[off++] = 0x0f;
    83138502    pCodeBuf[off++] = (uint8_t)enmCond | 0x80;
    8314     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
     8503    iemNativeAddTbExitFixup(pReNative, off, a_enmExitReason);
    83158504    pCodeBuf[off++] = 0x00;
    83168505    pCodeBuf[off++] = 0x00;
     
    83218510    /* ARM64 doesn't have the necessary jump range, so we jump via local label
    83228511       just like when we keep everything local. */
    8323     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
     8512    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, a_enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
    83248513    off = iemNativeEmitJccToLabelEx(pReNative, pCodeBuf, off, idxLabel, enmCond);
    83258514#endif
     
    83318520 * Emits a Jcc rel32 / B.cc imm19 to the epilog.
    83328521 */
    8333 DECL_INLINE_THROW(uint32_t)
    8334 iemNativeEmitJccTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason, IEMNATIVEINSTRCOND enmCond)
     8522template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8523DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitJcc(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVEINSTRCOND enmCond)
    83358524{
    83368525    IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    8337     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
    8338     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
    8339 
    8340 #ifdef RT_ARCH_AMD64
    8341     off = iemNativeEmitJccTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 6), off, enmExitReason, enmCond);
    8342 #elif defined(RT_ARCH_ARM64)
    8343     off = iemNativeEmitJccTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 2), off, enmExitReason, enmCond);
     8526    AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
     8527
     8528#ifdef RT_ARCH_AMD64
     8529    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS + 5);
     8530#elif defined(RT_ARCH_ARM64)
     8531    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS + 1);
    83448532#else
    83458533# error "Port me!"
    83468534#endif
     8535    off = iemNativeEmitTbExitJccEx<a_enmExitReason, a_fPostponedEfl>(pReNative, pCodeBuf, off, enmCond);
    83478536    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    83488537    return off;
     
    83538542 * Emits a JNZ/JNE rel32 / B.NE imm19 to the TB exit routine with the given reason.
    83548543 */
    8355 DECL_INLINE_THROW(uint32_t) iemNativeEmitJnzTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason)
    8356 {
    8357 #ifdef RT_ARCH_AMD64
    8358     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_ne);
    8359 #elif defined(RT_ARCH_ARM64)
    8360     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kArmv8InstrCond_Ne);
     8544template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8545DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitJnz(PIEMRECOMPILERSTATE pReNative, uint32_t off)
     8546{
     8547#ifdef RT_ARCH_AMD64
     8548    return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_ne);
     8549#elif defined(RT_ARCH_ARM64)
     8550    return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kArmv8InstrCond_Ne);
    83618551#else
    83628552# error "Port me!"
     
    83688558 * Emits a JZ/JE rel32 / B.EQ imm19 to the TB exit routine with the given reason.
    83698559 */
    8370 DECL_INLINE_THROW(uint32_t) iemNativeEmitJzTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason)
    8371 {
    8372 #ifdef RT_ARCH_AMD64
    8373     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_e);
    8374 #elif defined(RT_ARCH_ARM64)
    8375     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kArmv8InstrCond_Eq);
     8560template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8561DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitJz(PIEMRECOMPILERSTATE pReNative, uint32_t off)
     8562{
     8563#ifdef RT_ARCH_AMD64
     8564    return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_e);
     8565#elif defined(RT_ARCH_ARM64)
     8566    return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kArmv8InstrCond_Eq);
    83768567#else
    83778568# error "Port me!"
     
    83838574 * Emits a JA/JNBE rel32 / B.HI imm19 to the TB exit.
    83848575 */
    8385 DECL_INLINE_THROW(uint32_t) iemNativeEmitJaTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason)
    8386 {
    8387 #ifdef RT_ARCH_AMD64
    8388     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_nbe);
    8389 #elif defined(RT_ARCH_ARM64)
    8390     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kArmv8InstrCond_Hi);
     8576template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8577DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitJa(PIEMRECOMPILERSTATE pReNative, uint32_t off)
     8578{
     8579#ifdef RT_ARCH_AMD64
     8580    return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_nbe);
     8581#elif defined(RT_ARCH_ARM64)
     8582    return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kArmv8InstrCond_Hi);
    83918583#else
    83928584# error "Port me!"
     
    83988590 * Emits a JL/JNGE rel32 / B.LT imm19 to the TB exit with the given reason.
    83998591 */
    8400 DECL_INLINE_THROW(uint32_t) iemNativeEmitJlTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason)
    8401 {
    8402 #ifdef RT_ARCH_AMD64
    8403     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_l);
    8404 #elif defined(RT_ARCH_ARM64)
    8405     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kArmv8InstrCond_Lt);
     8592template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8593DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitJl(PIEMRECOMPILERSTATE pReNative, uint32_t off)
     8594{
     8595#ifdef RT_ARCH_AMD64
     8596    return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_l);
     8597#elif defined(RT_ARCH_ARM64)
     8598    return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kArmv8InstrCond_Lt);
    84068599#else
    84078600# error "Port me!"
     
    84108603
    84118604
    8412 DECL_INLINE_THROW(uint32_t)
    8413 iemNativeEmitTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off, IEMNATIVELABELTYPE enmExitReason)
    8414 {
    8415     IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    8416     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here. */
    8417     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
    8418 
    8419     iemNativeMarkCurCondBranchAsExiting(pReNative);
    8420 
    8421 #ifdef RT_ARCH_AMD64
    8422     /* jmp rel32 */
    8423     pCodeBuf[off++] = 0xe9;
    8424     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
    8425     pCodeBuf[off++] = 0xfe;
    8426     pCodeBuf[off++] = 0xff;
    8427     pCodeBuf[off++] = 0xff;
    8428     pCodeBuf[off++] = 0xff;
    8429 
    8430 #elif defined(RT_ARCH_ARM64)
    8431     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
    8432     pCodeBuf[off++] = Armv8A64MkInstrB(-1);
    8433 
    8434 #else
    8435 # error "Port me!"
    8436 #endif
    8437     return off;
    8438 }
    8439 
    8440 
    8441 DECL_INLINE_THROW(uint32_t)
    8442 iemNativeEmitTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason,
    8443                     bool fActuallyExitingTb = true)
    8444 {
    8445     IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    8446     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here. */
    8447     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
    8448 
    8449     if (fActuallyExitingTb)
    8450         iemNativeMarkCurCondBranchAsExiting(pReNative);
    8451 
    8452 #ifdef RT_ARCH_AMD64
    8453     PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
    8454 
    8455     /* jmp rel32 */
    8456     pCodeBuf[off++] = 0xe9;
    8457     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
    8458     pCodeBuf[off++] = 0xfe;
    8459     pCodeBuf[off++] = 0xff;
    8460     pCodeBuf[off++] = 0xff;
    8461     pCodeBuf[off++] = 0xff;
    8462 
    8463 #elif defined(RT_ARCH_ARM64)
    8464     PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
    8465     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
    8466     pCodeBuf[off++] = Armv8A64MkInstrB(-1);
    8467 
    8468 #else
    8469 # error "Port me!"
    8470 #endif
    8471     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    8472     return off;
    8473 }
    8474 
    8475 
    8476 /**
    8477  * Emits a jump to the TB exit with @a enmExitReason on the condition _any_ of the bits in @a fBits
    8478  * are set in @a iGprSrc.
    8479  */
    8480 DECL_INLINE_THROW(uint32_t)
    8481 iemNativeEmitTestAnyBitsInGprAndTbExitIfAnySet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    8482                                                uint8_t iGprSrc, uint64_t fBits, IEMNATIVELABELTYPE enmExitReason)
     8605/**
     8606 * Emits a jump to the TB exit with @a a_enmExitReason on the condition _any_ of
     8607 * the bits in @a fBits are set in @a iGprSrc.
     8608 */
     8609template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8610DECL_INLINE_THROW(uint32_t)
     8611iemNativeEmitTbExitIfAnyBitsSetInGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint64_t fBits)
    84838612{
    84848613    Assert(fBits); Assert(!RT_IS_POWER_OF_TWO(fBits));
    84858614
    84868615    off = iemNativeEmitTestAnyBitsInGpr(pReNative, off, iGprSrc, fBits);
    8487     return iemNativeEmitJnzTbExit(pReNative, off, enmExitReason);
    8488 }
    8489 
    8490 
     8616    return iemNativeEmitTbExitJnz<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
     8617}
     8618
     8619
     8620#if 0 /* unused */
    84918621/**
     84928622 * Emits a jump to the TB exit with reason @a a_enmExitReason on the condition _none_ of the bits in @a fBits
    84938623 * are set in @a iGprSrc.
    84948624 */
    8495 DECL_INLINE_THROW(uint32_t)
    8496 iemNativeEmitTestAnyBitsInGprAndTbExitIfNoneSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    8497                                                 uint8_t iGprSrc, uint64_t fBits, IEMNATIVELABELTYPE enmExitReason)
     8625template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8626DECL_INLINE_THROW(uint32_t)
     8627iemNativeEmitTbExitIfNoBitsSetInGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint64_t fBits)
    84988628{
    84998629    Assert(fBits); Assert(!RT_IS_POWER_OF_TWO(fBits));
    85008630
    85018631    off = iemNativeEmitTestAnyBitsInGpr(pReNative, off, iGprSrc, fBits);
    8502     return iemNativeEmitJzTbExit(pReNative, off, enmExitReason);
    8503 }
    8504 
    8505 
      8632    return iemNativeEmitTbExitJz<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
     8633}
     8634#endif
     8635
     8636
     8637#if 0 /* unused */
    85068638/**
    85078639 * Emits code that exits the TB with the given reason if @a iGprLeft and @a iGprRight
    85088640 * differs.
    85098641 */
    8510 DECL_INLINE_THROW(uint32_t)
    8511 iemNativeEmitTestIfGprNotEqualGprAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    8512                                            uint8_t iGprLeft, uint8_t iGprRight, IEMNATIVELABELTYPE enmExitReason)
     8642template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8643DECL_INLINE_THROW(uint32_t)
     8644iemNativeEmitTbExitIfGprNotEqualGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight)
    85138645{
    85148646    off = iemNativeEmitCmpGprWithGpr(pReNative, off, iGprLeft, iGprRight);
    8515     off = iemNativeEmitJnzTbExit(pReNative, off, enmExitReason);
    8516     return off;
    8517 }
      8647    off = iemNativeEmitTbExitJnz<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
     8648    return off;
     8649}
     8650#endif
    85188651
    85198652
     
    85228655 * @a uImm.
    85238656 */
    8524 DECL_INLINE_THROW(uint32_t)
    8525 iemNativeEmitTestIfGpr32NotEqualImmAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    8526                                              uint8_t iGprSrc, uint32_t uImm, IEMNATIVELABELTYPE enmExitReason)
     8657template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8658DECL_INLINE_THROW(uint32_t)
     8659iemNativeEmitTbExitIfGpr32NotEqualImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint32_t uImm)
    85278660{
    85288661    off = iemNativeEmitCmpGpr32WithImm(pReNative, off, iGprSrc, uImm);
    8529     off = iemNativeEmitJnzTbExit(pReNative, off, enmExitReason);
     8662    off = iemNativeEmitTbExitJnz<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
    85308663    return off;
    85318664}
     
    85358668 * Emits code that exits the current TB if @a iGprSrc differs from @a uImm.
    85368669 */
    8537 DECL_INLINE_THROW(uint32_t)
    8538 iemNativeEmitTestIfGprNotEqualImmAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    8539                                            uint8_t iGprSrc, uint64_t uImm, IEMNATIVELABELTYPE enmExitReason)
     8670template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8671DECL_INLINE_THROW(uint32_t)
     8672iemNativeEmitTbExitIfGprNotEqualImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint64_t uImm)
    85408673{
    85418674    off = iemNativeEmitCmpGprWithImm(pReNative, off, iGprSrc, uImm);
    8542     off = iemNativeEmitJnzTbExit(pReNative, off, enmExitReason);
     8675    off = iemNativeEmitTbExitJnz<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
    85438676    return off;
    85448677}
     
    85488681 * Emits code that exits the current TB with the given reason if 32-bit @a iGprSrc equals @a uImm.
    85498682 */
    8550 DECL_INLINE_THROW(uint32_t)
    8551 iemNativeEmitTestIfGpr32EqualsImmAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    8552                                            uint8_t iGprSrc, uint32_t uImm, IEMNATIVELABELTYPE enmExitReason)
     8683template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8684DECL_INLINE_THROW(uint32_t)
     8685iemNativeEmitTbExitIfGpr32EqualsImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint32_t uImm)
    85538686{
    85548687    off = iemNativeEmitCmpGpr32WithImm(pReNative, off, iGprSrc, uImm);
    8555     off = iemNativeEmitJzTbExit(pReNative, off, enmExitReason);
    8556     return off;
    8557 }
    8558 
    8559 
    8560 /**
    8561  * Emits code to exit the current TB with the reason @a enmExitReason on the condition that bit @a iBitNo _is_ _set_ in
    8562  * @a iGprSrc.
     8688    off = iemNativeEmitTbExitJz<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
     8689    return off;
     8690}
     8691
     8692
     8693/**
     8694 * Emits code to exit the current TB with the reason @a a_enmExitReason on the
     8695 * condition that bit @a iBitNo _is_ _set_ in @a iGprSrc.
    85638696 *
    85648697 * @note On ARM64 the range is only +/-8191 instructions.
    85658698 */
    8566 DECL_INLINE_THROW(uint32_t)
    8567 iemNativeEmitTestBitInGprAndTbExitIfSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    8568                                         uint8_t iGprSrc, uint8_t iBitNo, IEMNATIVELABELTYPE enmExitReason)
    8569 {
    8570     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
     8699template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8700DECL_INLINE_THROW(uint32_t)
     8701iemNativeEmitTbExitIfBitSetInGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint8_t iBitNo)
     8702{
     8703    AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
    85718704
    85728705#if defined(RT_ARCH_AMD64)
     
    85818714        pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, iGprSrc & 7);
    85828715        pbCodeBuf[off++] = (uint8_t)1 << iBitNo;
    8583         off = iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_ne);
     8716        IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     8717        off = iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_ne);
    85848718    }
    85858719    else
     
    85948728        pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 4, iGprSrc & 7);
    85958729        pbCodeBuf[off++] = iBitNo;
    8596         off = iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_c);
    8597     }
    8598     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    8599     return off;
    8600 
    8601 #else
    8602     /* ARM64 doesn't have the necessary jump range, so we jump via local label
    8603        just like when we keep everything local. */
     8730        IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     8731        off = iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_c);
     8732    }
     8733    return off;
     8734
     8735#elif defined(RT_ARCH_ARM64)
    86048736    IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    8605     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
    8606     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
     8737    /** @todo Perhaps we should always apply the PostponedEfl code pattern here,
     8738     *        it's the same number of instructions as the TST + B.CC stuff? */
     8739# ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     8740    if RT_CONSTEXPR(a_fPostponedEfl)
     8741        if (pReNative->PostponedEfl.fEFlags)
     8742        {
     8743            PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off,
     8744                                                                     3 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
     8745            pCodeBuf[off++] = Armv8A64MkInstrTbnz(1 /*l_NonPrimaryCodeStreamTarget*/, iGprSrc, iBitNo);
     8746            uint32_t const offFixup = off;
     8747            pCodeBuf[off++] = Armv8A64MkInstrB(0 /*l_PrimaryCodeStreamResume*/);
     8748            /* l_NonPrimaryCodeStreamTarget: */
     8749            off = iemNativeEmitTbExitEx<a_enmExitReason, false /*a_fActuallyExitingTb*/, true>(pReNative, pCodeBuf, off);
     8750            /* l_PrimaryCodeStreamResume: */
     8751            iemNativeFixupFixedJump(pReNative, offFixup, off);
     8752            IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     8753            return off;
     8754        }
     8755# endif
     8756    /* ARM64 doesn't have the necessary range to reach the per-chunk code, so
     8757       we go via a local trampoline. */
     8758    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, a_enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
    86078759    return iemNativeEmitTestBitInGprAndJmpToLabelIfCc(pReNative, off, iGprSrc, iBitNo, idxLabel, true /*fJmpIfSet*/);
    8608 #endif
    8609 }
    8610 
    8611 
    8612 /**
    8613  * Emits code that exits the current TB with @a enmExitReason if @a iGprSrc is not zero.
     8760#else
     8761# error "port me"
     8762#endif
     8763}
     8764
     8765
     8766/**
     8767 * Emits code that exits the current TB with @a a_enmExitReason if @a iGprSrc is
     8768 * not zero.
    86148769 *
    86158770 * The operand size is given by @a f64Bit.
    86168771 */
     8772template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
    86178773DECL_FORCE_INLINE_THROW(uint32_t)
    8618 iemNativeEmitTestIfGprIsNotZeroAndTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
    8619                                            uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
    8620 {
    8621     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
     8774iemNativeEmitTbExitIfGprIsNotZeroEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
     8775                                    uint8_t iGprSrc, bool f64Bit)
     8776{
     8777    AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
    86228778
    86238779#if defined(RT_ARCH_AMD64)
     
    86318787
    86328788    /* jnz idxLabel  */
    8633     return iemNativeEmitJccTbExitEx(pReNative, pCodeBuf, off, enmExitReason, kIemNativeInstrCond_ne);
    8634 
    8635 #else
    8636     /* ARM64 doesn't have the necessary jump range, so we jump via local label
    8637        just like when we keep everything local. */
     8789    return iemNativeEmitTbExitJccEx<a_enmExitReason, a_fPostponedEfl>(pReNative, pCodeBuf, off, kIemNativeInstrCond_ne);
     8790
     8791#elif defined(RT_ARCH_ARM64)
    86388792    IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    8639     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
    8640     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
     8793# ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     8794    if RT_CONSTEXPR(a_fPostponedEfl)
     8795        if (pReNative->PostponedEfl.fEFlags)
     8796        {
     8797            pCodeBuf[off++] = Armv8A64MkInstrCbnz(1 /*l_NonPrimaryCodeStreamTarget*/, iGprSrc, f64Bit);
     8798            uint32_t const offFixup = off;
     8799            pCodeBuf[off++] = Armv8A64MkInstrB(0 /*l_PrimaryCodeStreamResume*/);
     8800            /* l_NonPrimaryCodeStreamTarget: */
     8801            off = iemNativeEmitTbExitEx<a_enmExitReason, false /*a_fActuallyExitingTb*/, true>(pReNative, pCodeBuf, off);
     8802            /* l_PrimaryCodeStreamResume: */
     8803            iemNativeFixupFixedJump(pReNative, offFixup, off);
     8804            IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     8805            return off;
     8806        }
     8807# endif
     8808    /* ARM64 doesn't have the necessary range to reach the per-chunk code, so
     8809       we go via a local trampoline. */
     8810    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, a_enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
    86418811    return iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToLabelEx(pReNative, pCodeBuf, off, iGprSrc,
    86428812                                                                f64Bit, true /*fJmpIfNotZero*/, idxLabel);
    8643 #endif
    8644 }
    8645 
    8646 
    8647 /**
    8648  * Emits code to exit the current TB with the given reason @a enmExitReason if @a iGprSrc is not zero.
     8813#else
     8814# error "port me"
     8815#endif
     8816}
     8817
     8818
     8819/**
     8820 * Emits code to exit the current TB with the given reason @a a_enmExitReason if
     8821 * @a iGprSrc is not zero.
    86498822 *
    86508823 * The operand size is given by @a f64Bit.
    86518824 */
    8652 DECL_INLINE_THROW(uint32_t)
    8653 iemNativeEmitTestIfGprIsNotZeroAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    8654                                          uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
     8825template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8826DECL_INLINE_THROW(uint32_t)
     8827iemNativeEmitTbExitIfGprIsNotZero(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, bool f64Bit)
    86558828{
    86568829#if defined(RT_ARCH_AMD64)
    8657     off = iemNativeEmitTestIfGprIsNotZeroAndTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 3 + 6),
    8658                                                      off, iGprSrc, f64Bit, enmExitReason);
    8659     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    8660     return off;
    8661 #else
    8662     IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    8663     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
    8664     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
    8665     return iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToLabel(pReNative, off, iGprSrc, f64Bit, true /*fJmpIfNotZero*/, idxLabel);
    8666 #endif
    8667 }
    8668 
    8669 
    8670 /**
    8671  * Emits code that exits the current TB with @a enmExitReason if @a iGprSrc is zero.
     8830    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3 + 6 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
     8831
     8832#else
     8833    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
     8834#endif
     8835    off = iemNativeEmitTbExitIfGprIsNotZeroEx<a_enmExitReason, a_fPostponedEfl>(pReNative, pCodeBuf, off, iGprSrc, f64Bit);
     8836    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     8837    return off;
     8838}
     8839
     8840
     8841/**
     8842 * Emits code that exits the current TB with @a a_enmExitReason if @a iGprSrc is
     8843 * zero.
    86728844 *
    86738845 * The operand size is given by @a f64Bit.
    86748846 */
     8847template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
    86758848DECL_FORCE_INLINE_THROW(uint32_t)
    8676 iemNativeEmitTestIfGprIsZeroAndTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
    8677                                         uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
    8678 {
    8679     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
     8849iemNativeEmitTbExitIfGprIsZeroEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
     8850                                 uint8_t iGprSrc, bool f64Bit)
     8851{
     8852    AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
     8853
    86808854#if defined(RT_ARCH_AMD64)
    86818855    /* test reg32,reg32  / test reg64,reg64 */
     
    86888862
    86898863    /* jnz idxLabel  */
    8690     return iemNativeEmitJccTbExitEx(pReNative, pCodeBuf, off, enmExitReason, kIemNativeInstrCond_e);
    8691 
    8692 #else
    8693     /* ARM64 doesn't have the necessary jump range, so we jump via local label
    8694        just like when we keep everything local. */
     8864    return iemNativeEmitTbExitJccEx<a_enmExitReason, a_fPostponedEfl>(pReNative, pCodeBuf, off, kIemNativeInstrCond_e);
     8865
     8866#elif defined(RT_ARCH_ARM64)
    86958867    IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    8696     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
    8697     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
     8868# ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
     8869    if RT_CONSTEXPR(a_fPostponedEfl)
     8870        if (pReNative->PostponedEfl.fEFlags)
     8871        {
     8872            pCodeBuf[off++] = Armv8A64MkInstrCbz(1 /*l_NonPrimaryCodeStreamTarget*/, iGprSrc, f64Bit);
     8873            uint32_t const offFixup = off;
     8874            pCodeBuf[off++] = Armv8A64MkInstrB(0 /*l_PrimaryCodeStreamResume*/);
     8875            /* l_NonPrimaryCodeStreamTarget: */
     8876            off = iemNativeEmitTbExitEx<a_enmExitReason, false /*a_fActuallyExitingTb*/, true>(pReNative, pCodeBuf, off);
     8877            /* l_PrimaryCodeStreamResume: */
     8878            iemNativeFixupFixedJump(pReNative, offFixup, off);
     8879            IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     8880            return off;
     8881        }
     8882# endif
     8883    /* ARM64 doesn't have the necessary range to reach the per-chunk code, so
     8884       we go via a local trampoline. */
     8885    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, a_enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
    86988886    return iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToLabelEx(pReNative, pCodeBuf, off, iGprSrc,
    86998887                                                                f64Bit, false /*fJmpIfNotZero*/, idxLabel);
    8700 #endif
    8701 }
    8702 
    8703 
    8704 /**
    8705  * Emits code to exit the current TB with the given reason @a enmExitReason if @a iGprSrc is zero.
     8888#else
     8889# error "port me"
     8890#endif
     8891}
     8892
     8893
     8894/**
     8895 * Emits code to exit the current TB with the given reason @a a_enmExitReason if @a iGprSrc is zero.
    87068896 *
    87078897 * The operand size is given by @a f64Bit.
    87088898 */
    8709 DECL_INLINE_THROW(uint32_t)
    8710 iemNativeEmitTestIfGprIsZeroAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
    8711                                       uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
     8899template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
     8900DECL_INLINE_THROW(uint32_t)
     8901iemNativeEmitTbExitIfGprIsZero(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, bool f64Bit)
    87128902{
    87138903#if defined(RT_ARCH_AMD64)
    8714     off = iemNativeEmitTestIfGprIsZeroAndTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 3 + 6),
    8715                                                   off, iGprSrc, f64Bit, enmExitReason);
    8716     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
    8717     return off;
    8718 #else
    8719     IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
    8720     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
    8721     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
    8722     return iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToLabel(pReNative, off, iGprSrc, f64Bit, false /*fJmpIfNotZero*/, idxLabel);
    8723 #endif
     8904    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3 + 6 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
     8905
     8906#else
     8907    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
     8908#endif
     8909    off = iemNativeEmitTbExitIfGprIsZeroEx<a_enmExitReason, a_fPostponedEfl>(pReNative, pCodeBuf, off, iGprSrc, f64Bit);
     8910    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
     8911    return off;
    87248912}
    87258913
Note: See TracChangeset for help on using the changeset viewer.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette